summaryrefslogtreecommitdiff
path: root/apps/photos/retriever.py
diff options
context:
space:
mode:
Diffstat (limited to 'apps/photos/retriever.py')
-rw-r--r--apps/photos/retriever.py501
1 files changed, 501 insertions, 0 deletions
diff --git a/apps/photos/retriever.py b/apps/photos/retriever.py
new file mode 100644
index 0000000..137fe9f
--- /dev/null
+++ b/apps/photos/retriever.py
@@ -0,0 +1,501 @@
+from __future__ import division
+import datetime
+import os
+import cStringIO # *much* faster than StringIO
+import urllib
+
+
+from django.contrib.contenttypes.models import ContentType
+from django.template.defaultfilters import slugify
+from django.core.exceptions import ObjectDoesNotExist
+from django.utils.encoding import force_unicode,smart_unicode
+from django.conf import settings
+
+# Required PIL classes may or may not be available from the root namespace
+# depending on the installation
+try:
+ import Image
+ import ImageFile
+ import ImageFilter
+ import ImageEnhance
+except ImportError:
+ try:
+ from PIL import Image
+ from PIL import ImageFile
+ from PIL import ImageFilter
+ from PIL import ImageEnhance
+ except ImportError:
+ raise ImportError("Could not import the Python Imaging Library.")
+
+
+from utils.strutils import safestr
+from photos.models import Photo,PhotoGallery
+
+
+# Flickr Sync stuffs
+API_KEY = settings.FLICKR_API_KEY
+from utils.APIClients import FlickrClient
+
# Fallback EXIF values: exif_handler() backfills any of these keys that
# Flickr's getExif response did not supply, so callers can index them safely.
EXIF_PARAMS = {"Aperture":'f/2.8',"Make":'Apple',"Model":'iPhone',"Exposure":'',"ISO Speed":'',"Focal Length":'',"Shutter Speed":'',"Lens":'','Date and Time (Original)':'2008:07:03 22:44:25'}
+
def sync_flickr_photos(*args, **kwargs):
    """Mirror the newest public Flickr photos for FLICKR_USER_ID locally.

    Fetches one page (100 photos) of the user's public stream, skips photos
    already imported (matched on flickr_id + flickr_secret), and for each new
    one creates a Photo row with tags, EXIF data and reverse-geocoded
    location, then downloads local image copies.
    """
    cur_page = 1 # Start on the first page of the stream
    paginate_by = 100 # Get 100 photos at a time
    dupe = False # Set our dupe flag for the following loop
    # NOTE(review): `dupe` is assigned below but never read -- looks vestigial.
    BASE_PATH = 'http://flickr.com/services/rest/'
    client = FlickrClient(BASE_PATH, API_KEY)
    data = client.flickr_people_getPublicPhotos(user_id=settings.FLICKR_USER_ID, page=cur_page, per_page=paginate_by,extras="date_upload,date_taken,geo")
    #photos.photos.photo.reverse()
    for post in data.findall('photos/photo'):
        # Flatten the <photo> element's XML attributes into a plain dict.
        info = dict((k, smart_unicode(post.get(k))) for k in post.keys())
        try:
            row = Photo.objects.get(flickr_id=info['id'], flickr_secret=info['secret'])
            # If the row exists already, set the dupe flag
            dupe = True
            print 'already have '+info['id']+' moving on'
        except ObjectDoesNotExist:
            #for debugging:
            print info['title']
            taglist = []
            # Reverse-geocode against our own Location/Region polygons.
            location, region = get_geo(float(info['latitude']),float(info['longitude']))
            details = client.flickr_photos_getInfo(user_id=settings.FLICKR_USER_ID, photo_id=force_unicode(info['id']))
            for t in details.findall('photo/tags/tag'):
                tag = dict((k, smart_unicode(t.get(k))) for k in t.keys())
                taglist.append(tag['raw'])
            # NOTE(review): user_id=API_KEY looks like a copy/paste slip;
            # getExif is keyed on photo_id so it may be harmless -- confirm
            # against utils.APIClients.FlickrClient before changing.
            exif = exif_handler(client.flickr_photos_getExif(user_id=API_KEY, photo_id=safestr(info['id'])))

            # Flickr's EXIF date is 'YYYY:MM:DD HH:MM:SS'; replace(':','-',2)
            # converts just the date part so the shared parser accepts it.
            photo = Photo.objects.create(
                title = info['title'],
                flickr_id = info['id'],
                flickr_owner = info['owner'],
                flickr_server = info['server'],
                flickr_secret = info['secret'],
                flickr_originalsecret = force_unicode(details[0].attrib['originalsecret']),
                flickr_farm = info['farm'],
                pub_date = flickr_datetime_to_datetime(info['datetaken']),
                description = force_unicode(details[0].findtext('description')),
                exif_aperture = exif['Aperture'],
                exif_make = exif['Make'],
                exif_model = exif['Model'],
                exif_exposure = exif['Exposure'],
                exif_iso = exif['ISO Speed'],
                exif_lens = exif['Lens'],
                exif_focal_length = exif['Focal Length'],
                exif_date = flickr_datetime_to_datetime(exif["Date and Time (Original)"].replace(':', '-', 2)),
                lat = float(info['latitude']),
                lon = float(info['longitude']),
                region = region,
                location = location,
                tags = ", ".join(t for t in taglist)
            )
            #print info['title'], region, location
            # NOTE(review): objects.create() already saves; this save() is redundant.
            photo.save()
            make_local_copies(photo)
            slideshow_image(photo)
+
+
def exif_handler(data):
    """Flatten a flickr.photos.getExif response into a {label: value} dict.

    Aperture keeps only the first entry's 'clean' text; ExposureTime uses its
    'clean' text; every other tag (except the redundant 'Exposure') uses its
    'raw' text. Any key listed in EXIF_PARAMS that the response did not
    provide is backfilled with the module default, so callers can index all
    EXIF_PARAMS keys unconditionally.
    """
    converted = {}
    try:
        for t in data.findall('photo/exif'):
            e = dict((k, smart_unicode(t.get(k))) for k in t.keys())
            label = safestr(e['label'])
            if label == "Aperture":
                # Flickr reports Aperture more than once; keep the first.
                if "Aperture" not in converted:
                    converted["Aperture"] = safestr(t.findtext('clean'))
            elif safestr(e['tag']) != 'Exposure':
                if safestr(e['tag']) == 'ExposureTime':
                    converted[label] = safestr(t.findtext('clean'))
                else:
                    converted[label] = safestr(t.findtext('raw'))
    except Exception:
        # Best-effort by design: a missing or malformed EXIF payload simply
        # falls through to the defaults below. (Was a bare `except:`, which
        # also swallowed KeyboardInterrupt/SystemExit.)
        pass
    # Backfill defaults for any missing keys.
    for k, v in EXIF_PARAMS.items():
        if k not in converted:
            converted[k] = v
    return converted
+
+
def flickr_datetime_to_datetime(fdt):
    """Parse a Flickr 'YYYY-MM-DD HH:MM:SS' timestamp into a datetime."""
    from datetime import datetime
    return datetime.strptime(fdt, '%Y-%m-%d %H:%M:%S')
+
def get_geo(lat, lon):
    """Resolve a latitude/longitude pair to a (Location, Region) tuple.

    Location lookup is best-effort and yields None when the point falls
    outside every Location polygon; the Region lookup is required and raises
    Region.DoesNotExist if nothing contains the point.
    """
    from django.contrib.gis.geos import Point
    from locations.models import Location, Region

    point = Point(lon, lat)
    try:
        loc = Location.objects.get(geometry__contains=point)
    except Location.DoesNotExist:
        loc = None
    return loc, Region.objects.get(geometry__contains=point)
+
+
# Enlarge PIL's encoder buffer: saving JPEGs with optimize=True requires the
# whole file to fit in one block, and the default MAXBLOCK is too small for
# large images (raises IOError) -- needed by the quality=95/optimize saves below.
ImageFile.MAXBLOCK = 1000000
+
def slideshow_image(photo):
    """Write a slideshow-sized JPEG of *photo* under IMAGES_ROOT/slideshow/<year>/.

    Landscape images are capped at 1000px wide, portrait/square at 600px tall,
    preserving aspect ratio. Images already within the cap are saved as-is
    (never upscaled). Output file is <flickr_id>.jpg.

    The previous version duplicated the entire resize/save sequence across the
    landscape and portrait branches; only the cap differs, so the ratio is now
    computed once. The remote handle is also closed explicitly.
    """
    slide_dir = settings.IMAGES_ROOT + '/slideshow/' + photo.pub_date.strftime("%Y")
    if not os.path.isdir(slide_dir):
        os.makedirs(slide_dir)

    remote = urllib.urlopen(photo.get_original_url())
    try:
        # cStringIO is much faster than StringIO for buffering the download
        img = Image.open(cStringIO.StringIO(remote.read()))
    finally:
        remote.close()

    cur_width, cur_height = img.size
    if cur_width > cur_height:
        ratio = float(1000) / cur_width    # landscape: cap width at 1000px
    else:
        ratio = float(600) / cur_height    # portrait/square: cap height at 600px

    filename = '%s/%s.jpg' % (slide_dir, photo.flickr_id)
    if ratio < 1:
        # Downscale only -- matches the original strict "cur > new" check.
        resized = img.resize((int(cur_width * ratio), int(cur_height * ratio)), Image.ANTIALIAS)
        resized.save(filename, 'JPEG', quality=95, optimize=True)
    else:
        img.save(filename)
+
def _fetch_image(url):
    """Download *url* fully into memory and return it as a PIL Image."""
    remote = urllib.urlopen(url)
    try:
        # cStringIO is much faster than StringIO for buffering the download
        return Image.open(cStringIO.StringIO(remote.read()))
    finally:
        remote.close()


def make_local_copies(photo):
    """Mirror a Flickr photo locally at full, large and medium sizes.

    Files land at IMAGES_ROOT/flickr/<size>/<year>/<flickr_id>.jpg. The large
    copy is written only when the downloaded image really is a JPEG
    (original behavior, reason not recorded -- TODO confirm; possibly to skip
    non-JPEG placeholders). The fetch boilerplate, previously repeated three
    times with unclosed urlopen handles, lives in _fetch_image().
    """
    year = photo.pub_date.strftime("%Y")

    # full/original resolution
    orig_dir = settings.IMAGES_ROOT + '/flickr/full/' + year
    if not os.path.isdir(orig_dir):
        os.makedirs(orig_dir)
    img = _fetch_image(photo.get_original_url())
    img.save('%s/%s.jpg' % (orig_dir, photo.flickr_id))

    # large size
    large_dir = settings.IMAGES_ROOT + '/flickr/large/' + year
    if not os.path.isdir(large_dir):
        os.makedirs(large_dir)
    img = _fetch_image(photo.get_large_url())
    if img.format == 'JPEG':
        img.save('%s/%s.jpg' % (large_dir, photo.flickr_id))

    # medium size
    med_dir = settings.IMAGES_ROOT + '/flickr/med/' + year
    if not os.path.isdir(med_dir):
        os.makedirs(med_dir)
    img = _fetch_image(photo.get_medium_url())
    img.save('%s/%s.jpg' % (med_dir, photo.flickr_id))
+
+
def make_gallery_thumb(photo,set):
    """Build the 291x350 cover thumbnail for a PhotoGallery.

    Downloads *photo*'s original from Flickr, scales it just enough to fully
    cover 291x350, center-crops the overflow, and writes the result to
    IMAGES_ROOT/gallery_thumbs/<set.id>.jpg.

    NOTE(review): the parameter name `set` shadows the builtin; rename when
    callers can be updated.
    """
    crop_dir = settings.IMAGES_ROOT + '/gallery_thumbs/'
    if not os.path.isdir(crop_dir):
        os.makedirs(crop_dir)
    remote = photo.get_original_url()
    print remote
    fname = urllib.urlopen(remote)
    im = cStringIO.StringIO(fname.read()) # constructs a StringIO holding the image
    img = Image.open(im)

    #calculate crop:
    cur_width, cur_height = img.size
    new_width, new_height = 291, 350
    # Scale by the larger of the two ratios so the resized image covers the
    # whole target box (a "crop to fill", not "fit inside").
    ratio = max(float(new_width)/cur_width,float(new_height)/cur_height)
    x = (cur_width * ratio)
    y = (cur_height * ratio)
    # Overflow beyond the target box, split evenly for a centered crop.
    xd = abs(new_width - x)
    yd = abs(new_height - y)
    x_diff = int(xd / 2)
    y_diff = int(yd / 2)
    box = (int(x_diff), int(y_diff), int(x_diff+new_width), int(y_diff+new_height))

    #create resized file
    resized = img.resize((int(x), int(y)), Image.ANTIALIAS).crop(box)
    # save resized file
    resized_filename = '%s/%s.jpg' %(crop_dir, set.id)
    try:
        if img.format == 'JPEG':
            resized.save(resized_filename, 'JPEG', quality=95, optimize=True)
        else:
            resized.save(resized_filename)
    except IOError, e:
        # Don't leave a truncated file behind if the save failed midway.
        if os.path.isfile(resized_filename):
            os.unlink(resized_filename)
        raise e
    #os.unlink(img)
+
+
+
def sync_sets(*args, **kwargs):
    """Mirror the user's Flickr photosets as PhotoGallery rows.

    Fetches the full photoset list; sets already mirrored (matched on set_id)
    are skipped -- updating an existing set is not implemented. New sets not
    on the disregard list get a PhotoGallery row, their member photos
    attached, and a cover thumbnail built from the set's primary photo.

    Removed from the original: an unused `dupe` flag, an unused `row`
    binding, and an inverted `if ...: pass / else:` guard.
    """
    BASE_PATH = 'http://flickr.com/services/rest/'
    client = FlickrClient(BASE_PATH, API_KEY)
    data = client.flickr_photosets_getList(user_id=settings.FLICKR_USER_ID)
    # Sets we never want mirrored locally.
    disregard = ['POTD 2008','Snow Day','Wedding','Some random stuff']
    for post in data.findall('photosets/photoset'):
        info = dict((k, smart_unicode(post.get(k))) for k in post.keys())
        try:
            PhotoGallery.objects.get(set_id__exact=info['id'])
            # Already have it; an up-to-date check is a TODO.
            #get_photos_in_set(row,set.id)
        except ObjectDoesNotExist:
            title = force_unicode(post.findtext('title'))
            if title not in disregard:
                s = PhotoGallery.objects.create(
                    set_id = force_unicode(info['id']),
                    set_title = title,
                    set_desc = force_unicode(post.findtext('description')),
                    set_slug = slugify(title),
                    primary = force_unicode(info['primary']),
                )
                get_photos_in_set(s)
                # Build the gallery cover image from the set's primary photo
                # (which must already exist locally via sync_flickr_photos).
                photo = Photo.objects.get(flickr_id__exact=str(info['primary']))
                make_gallery_thumb(photo, s)
+
def get_photos_in_set(photo_set):
    """Attach every Flickr photo in *photo_set* to its PhotoGallery row.

    Assumes each member photo was already imported by sync_flickr_photos;
    Photo.DoesNotExist propagates otherwise. (Parameter renamed from `set`,
    which shadowed the builtin; the only in-file caller passes positionally.)
    """
    BASE_PATH = 'http://flickr.com/services/rest/'
    client = FlickrClient(BASE_PATH, API_KEY)
    data = client.flickr_photosets_getPhotos(user_id=settings.FLICKR_USER_ID, photoset_id=str(photo_set.set_id))
    for post in data.findall('photoset/photo'):
        info = dict((k, smart_unicode(post.get(k))) for k in post.keys())
        photo_set.photos.add(Photo.objects.get(flickr_id__exact=str(info['id'])))
+
+
# NOTE(review): the triple-quoted string below is dead code kept for
# reference (an older attribute-style Flickr sync); it should be deleted and
# recovered from version control if ever needed.
"""
+def sync_flickr_comments(*args, **kwargs):
+ cur_page = 1 # Start on the first page of the stream
+ paginate_by = 100 # Get 100 photos at a time
+ inc = 1 # Set our dupe flag for the following loop
+ # Get our flickr client running
+ client = FlickrClient(settings.FLICKR_API_KEY)
+ # get total number of photos in stream
+ total = client.flickr_people_getInfo(user_id=settings.FLICKR_USER_ID)
+ total_num = safestr(total.person.photos.count)
+ while (inc < int(total_num)):
+ photos = client.flickr_people_getPublicPhotos(user_id=settings.FLICKR_USER_ID, page=cur_page, per_page=paginate_by,extras="date_upload,date_taken,geo")
+ incr = 1
+ for photo in photos.photos:
+ do_photo_comments(photo("id"),photo("secret"))
+ inc = inc+1
+ incr = incr+1
+ print cur_page
+ if incr == 100:
+ cur_page = cur_page+1
+
+
+
+def do_photo_comments(id, secret):
+ # Get our flickr client running
+ client = FlickrClient(settings.FLICKR_API_KEY)
+ photo = Photo.objects.get(flickr_id=id, flickr_secret=secret)
+ comments = client.flickr_photos_comments_getList(user_id=settings.FLICKR_USER_ID, photo_id=id)
+ for item in comments.comments:
+ try:
+ item
+ print item('authorname')
+ dt = datetime.datetime.fromtimestamp(float(item('datecreate')))
+ ctype = ContentType.objects.get_for_model(Photo)
+ try:
+ f = FreeComment.objects.get(content_type=ctype, object_id=photo.id, submit_date=dt)
+ #print f.id
+ except ObjectDoesNotExist:
+ if safestr(item('authorname')) == 'luxagraf':
+ mail = 'hyper@luxagraf.net'
+ else:
+ mail = slugify(item('authorname'))+'@flickr.com'
+ c = FreeComment.objects.create (
+ content_type = ctype,
+ object_id = photo.id,
+ comment = safestr(item),
+ person_name = safestr(item('authorname')),
+ person_email = mail,
+ person_url = item('permalink'),
+ is_public = True,
+ site_id = 1,
+ approved = 0,
+ submit_date = dt
+ )
+
+ except AttributeError:
+ pass
+
+ def get_country_name(lat,long):
+ name = GeoClient.findCountrySubdivision(lat,long, http_proxy=None)
+ if name.countrySubdivision.countryCode.PCDATA == "US":
+ r_name = "US-%s" %(force_unicode(STATE_LIST[name.countrySubdivision.adminCode1.PCDATA]))
+ else:
+ r_name = force_unicode(name.countrySubdivision.countryName.PCDATA)
+ return r_name
+
+
+def create_size(self, photosize):
+ if self.size_exists(photosize):
+ return
+ if not os.path.isdir(self.cache_path()):
+ os.makedirs(self.cache_path())
+ try:
+ im = Image.open(self.get_image_filename())
+ except IOError:
+ return
+ if im.size == photosize.size():
+ shutil.copy(self.get_image_filename(),
+ self._get_SIZE_path(photosize))
+ return
+ cur_width, cur_height = im.size
+ new_width, new_height = photosize.size()
+ if photosize.crop:
+ ratio = max(float(new_width)/cur_width,float(new_height)/cur_height)
+ x = (cur_width * ratio)
+ y = (cur_height * ratio)
+ xd = abs(new_width - x)
+ yd = abs(new_height - y)
+ x_diff = int(xd / 2)
+ y_diff = int(yd / 2)
+ if self.crop_from == 'top':
+ box = (int(x_diff), 0, int(x_diff+new_width), new_height)
+ elif self.crop_from == 'left':
+ box = (0, int(y_diff), new_width, int(y_diff+new_height))
+ elif self.crop_from == 'bottom':
+ box = (int(x_diff), int(yd), int(x_diff+new_width), int(y)) # y - yd = new_height
+ elif self.crop_from == 'right':
+ box = (int(xd), int(y_diff), int(x), int(y_diff+new_height)) # x - xd = new_width
+ else:
+ box = (int(x_diff), int(y_diff), int(x_diff+new_width), int(y_diff+new_height))
+ resized = im.resize((int(x), int(y)), Image.ANTIALIAS).crop(box)
+ else:
+ if not new_width == 0 and not new_height == 0:
+ if cur_width > cur_height:
+ ratio = float(new_width)/cur_width
+ else:
+ ratio = float(new_height)/cur_height
+ else:
+ if new_width == 0:
+ ratio = float(new_height)/cur_height
+ else:
+ ratio = float(new_width)/cur_width
+ resized = im.resize((int(cur_width*ratio), int(cur_height*ratio)), Image.ANTIALIAS)
+
+ # Apply effect if found
+ if self.effect is not None:
+ resized = self.effect.process(resized)
+ elif photosize.effect is not None:
+ resized = photosize.effect.process(resized)
+
+ # save resized file
+ resized_filename = getattr(self, "get_%s_path" % photosize.name)()
+ try:
+ if im.format == 'JPEG':
+ resized.save(resized_filename, 'JPEG', quality=int(photosize.quality),
+ optimize=True)
+ else:
+ resized.save(resized_filename)
+ except IOError, e:
+ if os.path.isfile(resized_filename):
+ os.unlink(resized_filename)
+ raise e
+
+"""
+
+
+
# NOTE(review): more dead reference code below (an old photo-import loop and
# a one-off location backfill script); safe to delete once no longer needed.
"""
+for p in photos.photos.photo:
+ try:
+ row = Photo.objects.get(flickr_id=p.id, flickr_secret=p.secret)
+ # If the row exists already, set the dupe flag
+ dupe = True
+ #print 'already have '+p.id+' moving on'
+ except ObjectDoesNotExist:
+ #assign the photo to a place, uses "default" if the photo isn't near anything
+ place = place_handler(force_unicode(p.latitude)+","+force_unicode(p.longitude))
+ #Grab tags
+ tags = client.flickr_photos_getInfo(user_id=settings.FLICKR_USER_ID, photo_id=force_unicode(p.id))
+ # grab exif data
+ exif = exif_handler(client.flickr_photos_getExif(user_id=API_KEY, photo_id=safestr(p.id)))
+ # sort the exif data if it's available
+ if exif.has_key("Aperture"):
+ aperture = exif["Aperture"]
+ else: aperture = ''
+ if exif.has_key("Make"):
+ make = exif["Make"]
+ else: make = ''
+ if exif.has_key("Model"):
+ model = exif["Model"]
+ else: model = ''
+ if exif.has_key("Exposure"):
+ exposure = exif["Exposure"]
+ else: exposure = ''
+ if exif.has_key("ISO Speed"):
+ iso = exif["ISO Speed"]
+ else: iso = ''
+ if exif.has_key("Focal Length"):
+ length = exif["Focal Length"]
+ else: length = ''
+ if exif.has_key("Date and Time (Original)"):
+ dto = flickr_datetime_to_datetime(exif["Date and Time (Original)"].replace(':', '-', 2))
+ else: dto = flickr_datetime_to_datetime(p.datetaken)
+ photo = Photo.objects.create(
+ title = p.title,
+ flickr_id = p.id,
+ flickr_owner = p.owner,
+ flickr_server = p.server,
+ flickr_secret = p.secret,
+ flickr_originalsecret = tags.photo.originalsecret,
+ flickr_farm = p.farm,
+ pub_date = flickr_datetime_to_datetime(p.datetaken),
+ description = force_unicode(tags.photo.description.PCDATA),
+ exif_aperture = aperture,
+ exif_make = make,
+ exif_model = model,
+ exif_shutter = exposure,
+ exif_iso = iso,
+ exif_lens = length,
+ exif_date = dto,
+ gps = force_unicode(p.latitude)+","+force_unicode(p.longitude),
+ place = place,
+ tags = str(", ".join(t.raw for t in tags.photo.tags.tag)).lower()
+ )
+ make_local_size(photo)
+ #if (dupe):
+ #break
+
+from locations.models import Location
+from photos.models import Photo
+from django.contrib.gis.geos import Point
+
+def find_loc(photos):
+ for photo in photos:
+ p = Point(photo.lon, photo.lat, srid=4326)
+ try:
+ loc = Location.objects.filter(geometry__contains=p).get()
+ photo.location = loc
+ print photo.id
+ photo.save()
+ except Location.DoesNotExist:
+ print "photo %s does not fall within any exisiting location" %(photo.id)
+""" \ No newline at end of file