path: root/app/photos/retriever.py
Diffstat (limited to 'app/photos/retriever.py')
-rw-r--r--  app/photos/retriever.py  465
1 file changed, 128 insertions(+), 337 deletions(-)
diff --git a/app/photos/retriever.py b/app/photos/retriever.py
index 2a98d09..959b908 100644
--- a/app/photos/retriever.py
+++ b/app/photos/retriever.py
@@ -26,90 +26,129 @@ except ImportError:
from PIL import ImageEnhance
except ImportError:
raise ImportError("Could not import the Python Imaging Library.")
-
+
+ImageFile.MAXBLOCK = 1000000
from utils.strutils import safestr
from photos.models import Photo,PhotoGallery
+# from https://github.com/alexis-mignon/python-flickr-api
+# terribly documented, but offers a good clean OOP approach if you're willing to figure it out...
+import flickr_api
-# Flickr Sync stuffs
-API_KEY = settings.FLICKR_API_KEY
-from utils.APIClients import FlickrClient
-EXIF_PARAMS = {"Aperture":'f/2.8',"Make":'Apple',"Model":'iPhone',"Exposure":'',"ISO Speed":'',"Focal Length":'',"Shutter Speed":'',"Lens":'','Date and Time (Original)':'2008:07:03 22:44:25'}
+EXIF_PARAMS = {"FNumber":'f/2.8',"Make":'Apple',"Model":'iPhone',"ExposureTime":'',"ISO":'',"FocalLength":'', "LensModel":'','DateTimeOriginal':'2013:09:03 22:44:25'}
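+
+# The "app/photos/flickrauth" credentials file loaded below has to exist already.
+# A one-time bootstrap along these lines (a sketch based on the python-flickr-api
+# docs; run it once from a shell) would create it:
+#
+#   import flickr_api
+#   flickr_api.set_keys(api_key=settings.FLICKR_API_KEY, api_secret=settings.FLICKR_API_SECRET)
+#   a = flickr_api.auth.AuthHandler()
+#   print a.get_authorization_url("read")   # open in a browser, authorize, copy the verifier
+#   a.set_verifier("oauth_verifier value from the callback")
+#   a.save("app/photos/flickrauth")
+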
def sync_flickr_photos(*args, **kwargs):
- cur_page = 1 # Start on the first page of the stream
- paginate_by = 100 # Get 100 photos at a time
- dupe = False # Set our dupe flag for the following loop
- BASE_PATH = 'http://flickr.com/services/rest/'
- client = FlickrClient(BASE_PATH, API_KEY)
- data = client.flickr_people_getPublicPhotos(user_id=settings.FLICKR_USER_ID, page=cur_page, per_page=paginate_by,extras="date_upload,date_taken,geo")
- #photos.photos.photo.reverse()
- for post in data.findall('photos/photo'):
- info = dict((k, smart_unicode(post.get(k))) for k in post.keys())
+    flickr_api.set_keys(api_key=settings.FLICKR_API_KEY, api_secret=settings.FLICKR_API_SECRET)
+ flickr_api.set_auth_handler("app/photos/flickrauth")
+ user = flickr_api.test.login()
+ photos = user.getPhotos(extras="date_upload,date_taken,geo")
+ #reverse! reverse!
+ photos.reverse()
+ for photo in photos:
+ info = photo.getInfo()
try:
row = Photo.objects.get(flickr_id=info['id'], flickr_secret=info['secret'])
# If the row exists already, set the dupe flag
dupe = True
print 'already have '+info['id']+' moving on'
except ObjectDoesNotExist:
- #for debugging:
- print info['title']
- taglist = []
- location, region = get_geo(float(info['latitude']),float(info['longitude']))
- details = client.flickr_photos_getInfo(user_id=settings.FLICKR_USER_ID, photo_id=force_unicode(info['id']))
- for t in details.findall('photo/tags/tag'):
- tag = dict((k, smart_unicode(t.get(k))) for k in t.keys())
- taglist.append(tag['raw'])
- exif = exif_handler(client.flickr_photos_getExif(user_id=API_KEY, photo_id=safestr(info['id'])))
-
- photo, created = Photo.objects.get_or_create(
- title = info['title'],
- flickr_id = info['id'],
- flickr_owner = info['owner'],
- flickr_server = info['server'],
- flickr_secret = info['secret'],
- flickr_originalsecret = force_unicode(details[0].attrib['originalsecret']),
- flickr_farm = info['farm'],
- pub_date = flickr_datetime_to_datetime(info['datetaken']),
- description = force_unicode(details[0].findtext('description')),
- exif_aperture = exif['Aperture'],
- exif_make = exif['Make'],
- exif_model = exif['Model'],
- exif_exposure = exif['Exposure'],
- exif_iso = exif['ISO Speed'],
- exif_lens = exif['Lens'],
- exif_focal_length = exif['Focal Length'],
- exif_date = flickr_datetime_to_datetime(exif["Date and Time (Original)"].replace(':', '-', 2)),
- lat = float(info['latitude']),
- lon = float(info['longitude']),
- region = region,
- location = location,
- )
- if created:
- for t in taglist:
- photo.tags.add(t)
- #print info['title'], region, location
- photo.save()
- make_local_copies(photo)
- slideshow_image(photo)
-
+ get_photo(photo)
+
+def get_photo(photo):
+ info = photo.getInfo()
+ geo = photo.getLocation()
+ location, region = get_geo(float(geo['latitude']),float(geo['longitude']))
+ exif = exif_handler(photo.getExif())
+ p, created = Photo.objects.get_or_create(
+ title = info['title'],
+ flickr_id = info['id'],
+ flickr_owner = info['owner']['id'],
+ flickr_server = info['server'],
+ flickr_secret = info['secret'],
+ flickr_originalsecret = info['originalsecret'],
+ flickr_farm = info['farm'],
+ pub_date = flickr_datetime_to_datetime(info['taken']),
+ description = info['description'],
+ exif_aperture = exif['FNumber'],
+ exif_make = exif['Make'],
+ exif_model = exif['Model'],
+ exif_exposure = exif['ExposureTime'],
+ exif_iso = exif['ISO'],
+ exif_lens = exif['LensModel'],
+ exif_focal_length = exif['FocalLength'],
+ exif_date = flickr_datetime_to_datetime(exif["DateTimeOriginal"].replace(':', '-', 2)),
+ lat = float(geo['latitude']),
+ lon = float(geo['longitude']),
+ region = region,
+ location = location,
+ )
+ if created:
+ for tag in info['tags']:
+ p.tags.add(tag['raw'])
+ p.save()
+ make_local_copies(p)
+ #retina image:
+ #slideshow_image(p, 2000, 1600, 75)
+ #normal image
+ print p.title
+ return p
+
+
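+# Sketch of a single-photo backfill using the same helpers, assuming
+# flickr_api.Photo accepts an id kwarg and lazy-loads the rest on demand;
+# the name backfill_photo is illustrative, not part of this module:
+#
+#   def backfill_photo(flickr_id):
+#       flickr_api.set_keys(api_key=settings.FLICKR_API_KEY, api_secret=settings.FLICKR_API_SECRET)
+#       flickr_api.set_auth_handler("app/photos/flickrauth")
+#       return get_photo(flickr_api.Photo(id=flickr_id))
+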
+def sync_sets(*args, **kwargs):
+    flickr_api.set_keys(api_key=settings.FLICKR_API_KEY, api_secret=settings.FLICKR_API_SECRET)
+ flickr_api.set_auth_handler("app/photos/flickrauth")
+ user = flickr_api.test.login()
+ photosets = user.getPhotosets()
+ #reverse! reverse!
+ photosets.reverse()
+ disregard = ['POTD 2008','Snow Day','Wedding','Some random stuff','Lilah & Olivia', '6 months+', '6-9 months','9-18 months']
+ for photoset in photosets:
+ if photoset['title'] in disregard:
+ pass
+ else:
+ try:
+ row = PhotoGallery.objects.get(set_id__exact=photoset['id'])
+ print '%s %s %s' %('already have', row.set_title, 'moving on...')
+ # okay it already exists, but is it up-to-date?
+ #get_photos_in_set(row,set.id)
+ except ObjectDoesNotExist:
+ s = PhotoGallery.objects.create (
+ set_id = force_unicode(photoset['id']),
+ set_title = force_unicode(photoset['title']),
+ set_desc = force_unicode(photoset['description']),
+ set_slug = slugify(force_unicode(photoset['title'])),
+ primary = force_unicode(photoset['primary']),
+ pub_date = datetime.datetime.fromtimestamp(float(photoset['date_create']))
+ )
+
+ get_photos_in_set(photoset, s)
+ #create the gallery thumbnail image:
+ photo = Photo.objects.get(flickr_id__exact=str(photoset['primary']))
+ make_gallery_thumb(photo,s)
+
+def get_photos_in_set(flickr_photoset, photoset):
+ for photo in flickr_photoset.getPhotos():
+ try:
+ p = Photo.objects.get(flickr_id__exact=str(photo['id']))
+ except ObjectDoesNotExist:
+ p = get_photo(photo)
+ print p.title
+ photoset.photos.add(p)
+ slideshow_image(p, 1000, 800, 95)
+
+
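+# How these sync functions are meant to be run (a sketch; the import path is
+# assumed from this app's layout): wire them to a cron job or call them from
+# a Django shell --
+#
+#   from photos.retriever import sync_flickr_photos, sync_sets
+#   sync_flickr_photos()   # pull any new photos from the photostream
+#   sync_sets()            # then group them into PhotoGallery sets
+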
+################################################
+## Various meta data and geo helper functions ##
+################################################
+
def exif_handler(data):
converted = {}
try:
- for t in data.findall('photo/exif'):
- e = dict((k, smart_unicode(t.get(k))) for k in t.keys())
- if safestr(e['label']) == "Aperture":
- if not converted.has_key("Aperture"):
- converted["Aperture"] = safestr(t.findtext('clean'))
- else:
- if safestr(e['tag']) != 'Exposure':
- if safestr(e['tag']) == 'ExposureTime':
- converted[safestr(e['label'])] = safestr(t.findtext('clean'))
- else:
- converted[safestr(e['label'])] = safestr(t.findtext('raw'))
+ for t in data:
+ converted[t['tag']] = t['raw']
except:
pass
for k,v in EXIF_PARAMS.items():
@@ -137,53 +176,60 @@ def get_geo(lat,lon):
except Region.DoesNotExist:
region = None
return location, region
-
-ImageFile.MAXBLOCK = 1000000
+#######################################################################
+## Photo retrieval functions to pull down images from Flickr servers ##
+#######################################################################
-def slideshow_image(photo):
+def slideshow_image(photo,max_width, max_height, quality):
slide_dir = settings.IMAGES_ROOT + '/slideshow/'+ photo.pub_date.strftime("%Y")
if not os.path.isdir(slide_dir):
os.makedirs(slide_dir)
- med = photo.get_original_url()
- fname = urllib.urlopen(med)
+
+ # Is it a retina image or not?
+ if max_width >= 1001 or max_height >= 801:
+ filename = '%s/%sx2.jpg' %(slide_dir, photo.flickr_id)
+ else:
+ filename = '%s/%s.jpg' %(slide_dir, photo.flickr_id)
+
+ flickr_photo = photo.get_original_url()
+ fname = urllib.urlopen(flickr_photo)
im = cStringIO.StringIO(fname.read()) # constructs a StringIO holding the image
img = Image.open(im)
cur_width, cur_height = img.size
#if image landscape
if cur_width > cur_height:
- new_width = 1000
+ new_width = max_width
#check to make sure we aren't upsizing
if cur_width > new_width:
ratio = float(new_width)/cur_width
x = (cur_width * ratio)
y = (cur_height * ratio)
resized = img.resize((int(x), int(y)), Image.ANTIALIAS)
- resized_filename = '%s/%s.jpg' %(slide_dir, photo.flickr_id)
- resized.save(resized_filename, 'JPEG', quality=95, optimize=True)
+ resized.save(filename, 'JPEG', quality=quality, optimize=True)
else:
- filename = '%s/%s.jpg' %(slide_dir, photo.flickr_id)
img.save(filename)
else:
#image portrait
- new_height = 800
+ new_height = max_height
#check to make sure we aren't upsizing
if cur_height > new_height:
ratio = float(new_height)/cur_height
x = (cur_width * ratio)
y = (cur_height * ratio)
resized = img.resize((int(x), int(y)), Image.ANTIALIAS)
- resized_filename = '%s/%s.jpg' %(slide_dir, photo.flickr_id)
- resized.save(resized_filename, 'JPEG', quality=95, optimize=True)
+ resized.save(filename, 'JPEG', quality=quality, optimize=True)
else:
- filename = '%s/%s.jpg' %(slide_dir, photo.flickr_id)
img.save(filename)
photo.slideshowimage_width = photo.get_width
photo.slideshowimage_height = photo.get_height
photo.slideshowimage_margintop = photo.get_margin_top
photo.slideshowimage_marginleft = photo.get_margin_left
photo.save()
-
+ #now resize the local copy
+
+
+
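+# The size check in slideshow_image() treats anything over 1000x800 as a
+# retina asset and appends "x2" to the filename, so producing both variants
+# for one photo is just two calls (the retina call mirrors the commented-out
+# one in get_photo() above):
+#
+#   slideshow_image(photo, 1000, 800, 95)    # standard -> <flickr_id>.jpg
+#   slideshow_image(photo, 2000, 1600, 75)   # retina   -> <flickr_id>x2.jpg
+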
def make_local_copies(photo):
orig_dir = settings.IMAGES_ROOT + '/flickr/full/'+ photo.pub_date.strftime("%Y")
if not os.path.isdir(orig_dir):
@@ -215,9 +261,7 @@ def make_local_copies(photo):
img = Image.open(im)
local_med = '%s/%s.jpg' %(med_dir, photo.flickr_id)
img.save(local_med)
-
-
-
+
def make_gallery_thumb(photo,set):
crop_dir = settings.IMAGES_ROOT + '/gallery_thumbs/'
if not os.path.isdir(crop_dir):
@@ -256,257 +300,4 @@ def make_gallery_thumb(photo,set):
#os.unlink(img)
-
-def sync_sets(*args, **kwargs):
- dupe = False # Set our dupe flag for the following loop
- BASE_PATH = 'http://flickr.com/services/rest/'
- client = FlickrClient(BASE_PATH, API_KEY)
- data = client.flickr_photosets_getList(user_id=settings.FLICKR_USER_ID)
- nodes = data.findall('photosets/photoset')
- for post in nodes:
- info = dict((k, smart_unicode(post.get(k))) for k in post.keys())
- try:
- row = PhotoGallery.objects.get(set_id__exact=info['id'])
- # okay it already exists, but is it up-to-date?
- #get_photos_in_set(row,set.id)
-
- except ObjectDoesNotExist:
- disregard = ['POTD 2008','Snow Day','Wedding','Some random stuff']
- if force_unicode(post.findtext('title')) in disregard:
- pass
- else:
- s = PhotoGallery.objects.create (
- set_id = force_unicode(info['id']),
- set_title = force_unicode(post.findtext('title')),
- set_desc = force_unicode(post.findtext('description')),
- set_slug = slugify(force_unicode(post.findtext('title'))),
- primary = force_unicode(info['primary']),
- )
- get_photos_in_set(s)
- #create the gallery thumbnail image:
- photo = Photo.objects.get(flickr_id__exact=str(info['primary']))
- make_gallery_thumb(photo,s)
-
-def get_photos_in_set(set):
- BASE_PATH = 'http://flickr.com/services/rest/'
- client = FlickrClient(BASE_PATH, API_KEY)
- data = client.flickr_photosets_getPhotos(user_id=settings.FLICKR_USER_ID, photoset_id=str(set.set_id))
- for post in data.findall('photoset/photo'):
- info = dict((k, smart_unicode(post.get(k))) for k in post.keys())
- photo = Photo.objects.get(flickr_id__exact=str(info['id']))
- set.photos.add(photo)
-
-
-"""
-def sync_flickr_comments(*args, **kwargs):
- cur_page = 1 # Start on the first page of the stream
- paginate_by = 100 # Get 100 photos at a time
- inc = 1 # Set our dupe flag for the following loop
- # Get our flickr client running
- client = FlickrClient(settings.FLICKR_API_KEY)
- # get total number of photos in stream
- total = client.flickr_people_getInfo(user_id=settings.FLICKR_USER_ID)
- total_num = safestr(total.person.photos.count)
- while (inc < int(total_num)):
- photos = client.flickr_people_getPublicPhotos(user_id=settings.FLICKR_USER_ID, page=cur_page, per_page=paginate_by,extras="date_upload,date_taken,geo")
- incr = 1
- for photo in photos.photos:
- do_photo_comments(photo("id"),photo("secret"))
- inc = inc+1
- incr = incr+1
- print cur_page
- if incr == 100:
- cur_page = cur_page+1
-
-
-
-def do_photo_comments(id, secret):
- # Get our flickr client running
- client = FlickrClient(settings.FLICKR_API_KEY)
- photo = Photo.objects.get(flickr_id=id, flickr_secret=secret)
- comments = client.flickr_photos_comments_getList(user_id=settings.FLICKR_USER_ID, photo_id=id)
- for item in comments.comments:
- try:
- item
- print item('authorname')
- dt = datetime.datetime.fromtimestamp(float(item('datecreate')))
- ctype = ContentType.objects.get_for_model(Photo)
- try:
- f = FreeComment.objects.get(content_type=ctype, object_id=photo.id, submit_date=dt)
- #print f.id
- except ObjectDoesNotExist:
- if safestr(item('authorname')) == 'luxagraf':
- mail = 'hyper@luxagraf.net'
- else:
- mail = slugify(item('authorname'))+'@flickr.com'
- c = FreeComment.objects.create (
- content_type = ctype,
- object_id = photo.id,
- comment = safestr(item),
- person_name = safestr(item('authorname')),
- person_email = mail,
- person_url = item('permalink'),
- is_public = True,
- site_id = 1,
- approved = 0,
- submit_date = dt
- )
-
- except AttributeError:
- pass
-
- def get_country_name(lat,long):
- name = GeoClient.findCountrySubdivision(lat,long, http_proxy=None)
- if name.countrySubdivision.countryCode.PCDATA == "US":
- r_name = "US-%s" %(force_unicode(STATE_LIST[name.countrySubdivision.adminCode1.PCDATA]))
- else:
- r_name = force_unicode(name.countrySubdivision.countryName.PCDATA)
- return r_name
-
-
-def create_size(self, photosize):
- if self.size_exists(photosize):
- return
- if not os.path.isdir(self.cache_path()):
- os.makedirs(self.cache_path())
- try:
- im = Image.open(self.get_image_filename())
- except IOError:
- return
- if im.size == photosize.size():
- shutil.copy(self.get_image_filename(),
- self._get_SIZE_path(photosize))
- return
- cur_width, cur_height = im.size
- new_width, new_height = photosize.size()
- if photosize.crop:
- ratio = max(float(new_width)/cur_width,float(new_height)/cur_height)
- x = (cur_width * ratio)
- y = (cur_height * ratio)
- xd = abs(new_width - x)
- yd = abs(new_height - y)
- x_diff = int(xd / 2)
- y_diff = int(yd / 2)
- if self.crop_from == 'top':
- box = (int(x_diff), 0, int(x_diff+new_width), new_height)
- elif self.crop_from == 'left':
- box = (0, int(y_diff), new_width, int(y_diff+new_height))
- elif self.crop_from == 'bottom':
- box = (int(x_diff), int(yd), int(x_diff+new_width), int(y)) # y - yd = new_height
- elif self.crop_from == 'right':
- box = (int(xd), int(y_diff), int(x), int(y_diff+new_height)) # x - xd = new_width
- else:
- box = (int(x_diff), int(y_diff), int(x_diff+new_width), int(y_diff+new_height))
- resized = im.resize((int(x), int(y)), Image.ANTIALIAS).crop(box)
- else:
- if not new_width == 0 and not new_height == 0:
- if cur_width > cur_height:
- ratio = float(new_width)/cur_width
- else:
- ratio = float(new_height)/cur_height
- else:
- if new_width == 0:
- ratio = float(new_height)/cur_height
- else:
- ratio = float(new_width)/cur_width
- resized = im.resize((int(cur_width*ratio), int(cur_height*ratio)), Image.ANTIALIAS)
-
- # Apply effect if found
- if self.effect is not None:
- resized = self.effect.process(resized)
- elif photosize.effect is not None:
- resized = photosize.effect.process(resized)
-
- # save resized file
- resized_filename = getattr(self, "get_%s_path" % photosize.name)()
- try:
- if im.format == 'JPEG':
- resized.save(resized_filename, 'JPEG', quality=int(photosize.quality),
- optimize=True)
- else:
- resized.save(resized_filename)
- except IOError, e:
- if os.path.isfile(resized_filename):
- os.unlink(resized_filename)
- raise e
-
-"""
-
-
-
-"""
-for p in photos.photos.photo:
- try:
- row = Photo.objects.get(flickr_id=p.id, flickr_secret=p.secret)
- # If the row exists already, set the dupe flag
- dupe = True
- #print 'already have '+p.id+' moving on'
- except ObjectDoesNotExist:
- #assign the photo to a place, uses "default" if the photo isn't near anything
- place = place_handler(force_unicode(p.latitude)+","+force_unicode(p.longitude))
- #Grab tags
- tags = client.flickr_photos_getInfo(user_id=settings.FLICKR_USER_ID, photo_id=force_unicode(p.id))
- # grab exif data
- exif = exif_handler(client.flickr_photos_getExif(user_id=API_KEY, photo_id=safestr(p.id)))
- # sort the exif data if it's available
- if exif.has_key("Aperture"):
- aperture = exif["Aperture"]
- else: aperture = ''
- if exif.has_key("Make"):
- make = exif["Make"]
- else: make = ''
- if exif.has_key("Model"):
- model = exif["Model"]
- else: model = ''
- if exif.has_key("Exposure"):
- exposure = exif["Exposure"]
- else: exposure = ''
- if exif.has_key("ISO Speed"):
- iso = exif["ISO Speed"]
- else: iso = ''
- if exif.has_key("Focal Length"):
- length = exif["Focal Length"]
- else: length = ''
- if exif.has_key("Date and Time (Original)"):
- dto = flickr_datetime_to_datetime(exif["Date and Time (Original)"].replace(':', '-', 2))
- else: dto = flickr_datetime_to_datetime(p.datetaken)
- photo = Photo.objects.create(
- title = p.title,
- flickr_id = p.id,
- flickr_owner = p.owner,
- flickr_server = p.server,
- flickr_secret = p.secret,
- flickr_originalsecret = tags.photo.originalsecret,
- flickr_farm = p.farm,
- pub_date = flickr_datetime_to_datetime(p.datetaken),
- description = force_unicode(tags.photo.description.PCDATA),
- exif_aperture = aperture,
- exif_make = make,
- exif_model = model,
- exif_shutter = exposure,
- exif_iso = iso,
- exif_lens = length,
- exif_date = dto,
- gps = force_unicode(p.latitude)+","+force_unicode(p.longitude),
- place = place,
- tags = str(", ".join(t.raw for t in tags.photo.tags.tag)).lower()
- )
- make_local_size(photo)
- #if (dupe):
- #break
-
-from locations.models import Location
-from photos.models import Photo
-from django.contrib.gis.geos import Point
-
-def find_loc(photos):
- for photo in photos:
- p = Point(photo.lon, photo.lat, srid=4326)
- try:
- loc = Location.objects.filter(geometry__contains=p).get()
- photo.location = loc
- print photo.id
- photo.save()
- except Location.DoesNotExist:
- print "photo %s does not fall within any exisiting location" %(photo.id)
-""" \ No newline at end of file
+