phd.bib

@comment{{This file has been generated by bib2bib 1.96}}
@comment{{Command line: ./bib2bib -ob phd.bib -c '$type = "PHDTHESIS"' -s year -r mediamill.bib}}
@phdthesis{LiPHD12,
  author = {Xirong Li},
  title = {Content-Based Visual Search Learned from Social Media},
  year = 2012,
  school = {University of Amsterdam},
  address = {Amsterdam, The Netherlands}
}
@phdthesis{SandPHD11,
  author = {Koen E. A. {van de Sande}},
  title = {Invariant Color Descriptors for Efficient Object Recognition},
  year = 2011,
  school = {University of Amsterdam},
  address = {Amsterdam, The Netherlands}
}
@phdthesis{HuurPHD10,
  author = {Bouke Huurnink},
  title = {Search in Audiovisual Broadcast Archives},
  year = 2010,
  school = {University of Amsterdam},
  address = {Amsterdam, The Netherlands}
}
@phdthesis{GemePHD10,
  author = {Jan C. {van Gemert}},
  title = {Robust Visual Scene Categorization in Context},
  year = 2010,
  school = {University of Amsterdam},
  address = {Amsterdam, The Netherlands}
}
@phdthesis{NguyenPHD06,
  author = {Giang P. Nguyen},
  title = {Interactive Image Search using Similarity-Based Visualization},
  month = {December},
  year = 2006,
  school = {University of Amsterdam},
  pdf = {http://www.science.uva.nl/research/mediamill/pub/nguyen-thesis.pdf},
  abstract = {
  		 Searching for images in a small collection can be done by simply looking
		 at them one by one. Image collections on the web and professional
		 collections, however, are on the order of a hundred thousand if not a
		 million images. For such collections, systems should provide efficient
		 browsing techniques. As users are, most of the time, non-expert searchers,
		 the systems must also have a user-friendly interface. To satisfy these
		 requirements, we design image search systems that allow the user to
		 interact with image collections in an intuitive way. To that end, advanced
		 visualization techniques are used in which a cloud of images is presented
		 on the screen such that similar images appear close to each other. In this
		 way the user's attention is directed toward the right search direction.
		 While exploring this direction, the user can give feedback to the system
		 by indicating relevant images. The system then learns to adapt itself to
		 come closer to the user's search expectation. We have demonstrated our
		 proposed approach on different image collections, ranging from simple ones
		 to very complicated ones such as images taken from large news video
		 archives. The experimental results show a significant improvement in
		 search performance over existing methods.
  		}
}
@phdthesis{SnoekPHD05,
  author = {Cees G. M. Snoek},
  title = {The Authoring Metaphor to Machine Understanding of Multimedia},
  month = {October},
  year = 2005,
  school = {University of Amsterdam},
  pdf = {http://isis-data.science.uva.nl/cgmsnoek/pub/snoek-thesis.pdf},
  abstract = {
  		 This thesis makes a contribution to the field of multimedia understanding. Our
  		 ultimate aim is to structure the digital multimedia chaos by bridging the
  		 semantic gap between computable data features on one end and the semantic
  		 interpretation of the data by a user on the other end. We distinguish between
  		 produced and non-produced multimedia or video documents, and we start from the
  		 view that a produced video is the result of an authoring-driven production
  		 process. This authoring process serves as a metaphor for machine-driven
  		 understanding. We present a step-by-step extrapolation of this authoring
  		 metaphor for automatic multimedia understanding. In doing so, the thesis covers
  		 an extensive overview of the field, a theoretical foundation for
  		 authoring-driven multimedia understanding, state-of-the-art benchmark
  		 validation, and practical semantic video retrieval applications.
  		}
}