From: <li...@us...> - 2008-11-11 11:17:09
|
Revision: 573 http://pyphant.svn.sourceforge.net/pyphant/?rev=573&view=rev Author: liehr Date: 2008-11-11 11:17:02 +0000 (Tue, 11 Nov 2008) Log Message: ----------- Included CeCILL-B version of Konrad Hinsen's PhysicalQuantities module. Modified Paths: -------------- trunk/src/pyphant/pyphant/core/DataContainer.py trunk/src/pyphant/pyphant/core/FieldContainer.py trunk/src/pyphant/pyphant/core/PyTablesPersister.py trunk/src/pyphant/pyphant/tests/TestDataContainer.py trunk/src/pyphant/pyphant/tests/TestPyTablesPersister.py trunk/src/pyphant/pyphant/visualizers/Chart.py trunk/src/pyphant/pyphant/visualizers/External.py trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py trunk/src/workers/ImageProcessing/ImageProcessing/DistanceMapper.py trunk/src/workers/ImageProcessing/ImageProcessing/ImageLoaderWorker.py trunk/src/workers/ImageProcessing/ImageProcessing/SlopeCalculator.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestApplyMask.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestDistanceMapper.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestImageLoader.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestMedianiser.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestSkeletonizeFeature.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestSlopeCalculator.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestThresholdingWorker.py trunk/src/workers/OSC/OSC/CompareFields.py trunk/src/workers/OSC/OSC/ComputeFunctional.py trunk/src/workers/OSC/OSC/ErrorEstimator.py trunk/src/workers/OSC/OSC/EstimateParameter.py trunk/src/workers/OSC/OSC/ExtremumFinder.py trunk/src/workers/OSC/OSC/MRA.py trunk/src/workers/OSC/OSC/OscAbsorption.py trunk/src/workers/OSC/OSC/OscCurrent.py trunk/src/workers/OSC/OSC/OscLoader.py trunk/src/workers/OSC/OSC/OscVisualisers.py trunk/src/workers/OSC/OSC/Slicing.py trunk/src/workers/OSC/OSC/Smoother.py trunk/src/workers/OSC/OSC/ThicknessModeller.py 
trunk/src/workers/OSC/OSC/tests/TestExtremumFinder.py trunk/src/workers/OSC/OSC/tests/TestMRA.py trunk/src/workers/OSC/OSC/tests/TestOscAbsorption.py trunk/src/workers/Statistics/Statistics/tests/TestHistogram.py trunk/src/workers/fmfile/fmfile/FMFLoader.py trunk/src/workers/fmfile/fmfile/FMFLoader2.py trunk/src/workers/fmfile/fmfile/FMFpythonTree.py Added Paths: ----------- trunk/src/pyphant/pyphant/quantities/ trunk/src/pyphant/pyphant/quantities/LICENCE trunk/src/pyphant/pyphant/quantities/NumberDict.py trunk/src/pyphant/pyphant/quantities/PhysicalQuantities.py trunk/src/pyphant/pyphant/quantities/__init__.py Modified: trunk/src/pyphant/pyphant/core/DataContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/DataContainer.py 2008-11-05 09:41:50 UTC (rev 572) +++ trunk/src/pyphant/pyphant/core/DataContainer.py 2008-11-11 11:17:02 UTC (rev 573) @@ -58,7 +58,7 @@ import scipy, copy, md5, threading, numpy, StringIO import os, platform, datetime, socket, urlparse -from Scientific.Physics.PhysicalQuantities import (isPhysicalQuantity, PhysicalQuantity,_prefixes) +from pyphant.quantities.PhysicalQuantities import (isPhysicalQuantity, PhysicalQuantity,_prefixes) import logging Modified: trunk/src/pyphant/pyphant/core/FieldContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/FieldContainer.py 2008-11-05 09:41:50 UTC (rev 572) +++ trunk/src/pyphant/pyphant/core/FieldContainer.py 2008-11-11 11:17:02 UTC (rev 573) @@ -36,7 +36,7 @@ import scipy, copy, md5, threading, numpy, StringIO import os, platform, datetime, socket, urlparse -from Scientific.Physics.PhysicalQuantities import (isPhysicalQuantity, PhysicalQuantity,_prefixes) +from pyphant.quantities.PhysicalQuantities import (isPhysicalQuantity, PhysicalQuantity,_prefixes) from DataContainer import DataContainer, enc, _logger #Default variables of indices Modified: 
trunk/src/pyphant/pyphant/core/PyTablesPersister.py =================================================================== --- trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2008-11-05 09:41:50 UTC (rev 572) +++ trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2008-11-11 11:17:02 UTC (rev 573) @@ -65,7 +65,7 @@ import sys from pyphant.core import (CompositeWorker, DataContainer) from tables import StringCol, Col -from Scientific.Physics.PhysicalQuantities import PhysicalQuantity +from pyphant.quantities.PhysicalQuantities import PhysicalQuantity import scipy import logging _logger = logging.getLogger("pyphant") Added: trunk/src/pyphant/pyphant/quantities/LICENCE =================================================================== --- trunk/src/pyphant/pyphant/quantities/LICENCE (rev 0) +++ trunk/src/pyphant/pyphant/quantities/LICENCE 2008-11-11 11:17:02 UTC (rev 573) @@ -0,0 +1,519 @@ + +CONTRAT DE LICENCE DE LOGICIEL LIBRE CeCILL-B + + + Avertissement + +Ce contrat est une licence de logiciel libre issue d'une concertation +entre ses auteurs afin que le respect de deux grands principes pr\xE9side \xE0 +sa r\xE9daction: + + * d'une part, le respect des principes de diffusion des logiciels + libres: acc\xE8s au code source, droits \xE9tendus conf\xE9r\xE9s aux + utilisateurs, + * d'autre part, la d\xE9signation d'un droit applicable, le droit + fran\xE7ais, auquel elle est conforme, tant au regard du droit de la + responsabilit\xE9 civile que du droit de la propri\xE9t\xE9 intellectuelle + et de la protection qu'il offre aux auteurs et titulaires des + droits patrimoniaux sur un logiciel. + +Les auteurs de la licence CeCILL-B (pour Ce[a] C[nrs] I[nria] L[ogiciel] +L[ibre]) sont: + +Commissariat \xE0 l'Energie Atomique - CEA, \xE9tablissement public de +recherche \xE0 caract\xE8re scientifique, technique et industriel, dont le +si\xE8ge est situ\xE9 25 rue Leblanc, immeuble Le Ponant D, 75015 Paris. 
+ +Centre National de la Recherche Scientifique - CNRS, \xE9tablissement +public \xE0 caract\xE8re scientifique et technologique, dont le si\xE8ge est +situ\xE9 3 rue Michel-Ange, 75794 Paris cedex 16. + +Institut National de Recherche en Informatique et en Automatique - +INRIA, \xE9tablissement public \xE0 caract\xE8re scientifique et technologique, +dont le si\xE8ge est situ\xE9 Domaine de Voluceau, Rocquencourt, BP 105, 78153 +Le Chesnay cedex. + + + Pr\xE9ambule + +Ce contrat est une licence de logiciel libre dont l'objectif est de +conf\xE9rer aux utilisateurs une tr\xE8s large libert\xE9 de modification et de +redistribution du logiciel r\xE9gi par cette licence. + +L'exercice de cette libert\xE9 est assorti d'une obligation forte de +citation \xE0 la charge de ceux qui distribueraient un logiciel incorporant +un logiciel r\xE9gi par la pr\xE9sente licence afin d'assurer que les +contributions de tous soient correctement identifi\xE9es et reconnues. + +L'accessibilit\xE9 au code source et les droits de copie, de modification +et de redistribution qui d\xE9coulent de ce contrat ont pour contrepartie +de n'offrir aux utilisateurs qu'une garantie limit\xE9e et de ne faire +peser sur l'auteur du logiciel, le titulaire des droits patrimoniaux et +les conc\xE9dants successifs qu'une responsabilit\xE9 restreinte. + +A cet \xE9gard l'attention de l'utilisateur est attir\xE9e sur les risques +associ\xE9s au chargement, \xE0 l'utilisation, \xE0 la modification et/ou au +d\xE9veloppement et \xE0 la reproduction du logiciel par l'utilisateur \xE9tant +donn\xE9 sa sp\xE9cificit\xE9 de logiciel libre, qui peut le rendre complexe \xE0 +manipuler et qui le r\xE9serve donc \xE0 des d\xE9veloppeurs ou des +professionnels avertis poss\xE9dant des connaissances informatiques +approfondies. 
Les utilisateurs sont donc invit\xE9s \xE0 charger et tester +l'ad\xE9quation du logiciel \xE0 leurs besoins dans des conditions permettant +d'assurer la s\xE9curit\xE9 de leurs syst\xE8mes et/ou de leurs donn\xE9es et, plus +g\xE9n\xE9ralement, \xE0 l'utiliser et l'exploiter dans les m\xEAmes conditions de +s\xE9curit\xE9. Ce contrat peut \xEAtre reproduit et diffus\xE9 librement, sous +r\xE9serve de le conserver en l'\xE9tat, sans ajout ni suppression de clauses. + +Ce contrat est susceptible de s'appliquer \xE0 tout logiciel dont le +titulaire des droits patrimoniaux d\xE9cide de soumettre l'exploitation aux +dispositions qu'il contient. + + + Article 1 - DEFINITIONS + +Dans ce contrat, les termes suivants, lorsqu'ils seront \xE9crits avec une +lettre capitale, auront la signification suivante: + +Contrat: d\xE9signe le pr\xE9sent contrat de licence, ses \xE9ventuelles versions +post\xE9rieures et annexes. + +Logiciel: d\xE9signe le logiciel sous sa forme de Code Objet et/ou de Code +Source et le cas \xE9ch\xE9ant sa documentation, dans leur \xE9tat au moment de +l'acceptation du Contrat par le Licenci\xE9. + +Logiciel Initial: d\xE9signe le Logiciel sous sa forme de Code Source et +\xE9ventuellement de Code Objet et le cas \xE9ch\xE9ant sa documentation, dans +leur \xE9tat au moment de leur premi\xE8re diffusion sous les termes du Contrat. + +Logiciel Modifi\xE9: d\xE9signe le Logiciel modifi\xE9 par au moins une +Contribution. + +Code Source: d\xE9signe l'ensemble des instructions et des lignes de +programme du Logiciel et auquel l'acc\xE8s est n\xE9cessaire en vue de +modifier le Logiciel. + +Code Objet: d\xE9signe les fichiers binaires issus de la compilation du +Code Source. + +Titulaire: d\xE9signe le ou les d\xE9tenteurs des droits patrimoniaux d'auteur +sur le Logiciel Initial. + +Licenci\xE9: d\xE9signe le ou les utilisateurs du Logiciel ayant accept\xE9 le +Contrat. + +Contributeur: d\xE9signe le Licenci\xE9 auteur d'au moins une Contribution. 
+ +Conc\xE9dant: d\xE9signe le Titulaire ou toute personne physique ou morale +distribuant le Logiciel sous le Contrat. + +Contribution: d\xE9signe l'ensemble des modifications, corrections, +traductions, adaptations et/ou nouvelles fonctionnalit\xE9s int\xE9gr\xE9es dans +le Logiciel par tout Contributeur, ainsi que tout Module Interne. + +Module: d\xE9signe un ensemble de fichiers sources y compris leur +documentation qui permet de r\xE9aliser des fonctionnalit\xE9s ou services +suppl\xE9mentaires \xE0 ceux fournis par le Logiciel. + +Module Externe: d\xE9signe tout Module, non d\xE9riv\xE9 du Logiciel, tel que ce +Module et le Logiciel s'ex\xE9cutent dans des espaces d'adressage +diff\xE9rents, l'un appelant l'autre au moment de leur ex\xE9cution. + +Module Interne: d\xE9signe tout Module li\xE9 au Logiciel de telle sorte +qu'ils s'ex\xE9cutent dans le m\xEAme espace d'adressage. + +Parties: d\xE9signe collectivement le Licenci\xE9 et le Conc\xE9dant. + +Ces termes s'entendent au singulier comme au pluriel. + + + Article 2 - OBJET + +Le Contrat a pour objet la concession par le Conc\xE9dant au Licenci\xE9 d'une +licence non exclusive, cessible et mondiale du Logiciel telle que +d\xE9finie ci-apr\xE8s \xE0 l'article 5 pour toute la dur\xE9e de protection des droits +portant sur ce Logiciel. + + + Article 3 - ACCEPTATION + +3.1 L'acceptation par le Licenci\xE9 des termes du Contrat est r\xE9put\xE9e +acquise du fait du premier des faits suivants: + + * (i) le chargement du Logiciel par tout moyen notamment par + t\xE9l\xE9chargement \xE0 partir d'un serveur distant ou par chargement \xE0 + partir d'un support physique; + * (ii) le premier exercice par le Licenci\xE9 de l'un quelconque des + droits conc\xE9d\xE9s par le Contrat. 
+ +3.2 Un exemplaire du Contrat, contenant notamment un avertissement +relatif aux sp\xE9cificit\xE9s du Logiciel, \xE0 la restriction de garantie et \xE0 +la limitation \xE0 un usage par des utilisateurs exp\xE9riment\xE9s a \xE9t\xE9 mis \xE0 +disposition du Licenci\xE9 pr\xE9alablement \xE0 son acceptation telle que +d\xE9finie \xE0 l'article 3.1 ci dessus et le Licenci\xE9 reconna\xEEt en avoir pris +connaissance. + + + Article 4 - ENTREE EN VIGUEUR ET DUREE + + + 4.1 ENTREE EN VIGUEUR + +Le Contrat entre en vigueur \xE0 la date de son acceptation par le Licenci\xE9 +telle que d\xE9finie en 3.1. + + + 4.2 DUREE + +Le Contrat produira ses effets pendant toute la dur\xE9e l\xE9gale de +protection des droits patrimoniaux portant sur le Logiciel. + + + Article 5 - ETENDUE DES DROITS CONCEDES + +Le Conc\xE9dant conc\xE8de au Licenci\xE9, qui accepte, les droits suivants sur +le Logiciel pour toutes destinations et pour la dur\xE9e du Contrat dans +les conditions ci-apr\xE8s d\xE9taill\xE9es. + +Par ailleurs, si le Conc\xE9dant d\xE9tient ou venait \xE0 d\xE9tenir un ou +plusieurs brevets d'invention prot\xE9geant tout ou partie des +fonctionnalit\xE9s du Logiciel ou de ses composants, il s'engage \xE0 ne pas +opposer les \xE9ventuels droits conf\xE9r\xE9s par ces brevets aux Licenci\xE9s +successifs qui utiliseraient, exploiteraient ou modifieraient le +Logiciel. En cas de cession de ces brevets, le Conc\xE9dant s'engage \xE0 +faire reprendre les obligations du pr\xE9sent alin\xE9a aux cessionnaires. + + + 5.1 DROIT D'UTILISATION + +Le Licenci\xE9 est autoris\xE9 \xE0 utiliser le Logiciel, sans restriction quant +aux domaines d'application, \xE9tant ci-apr\xE8s pr\xE9cis\xE9 que cela comporte: + + 1. la reproduction permanente ou provisoire du Logiciel en tout ou + partie par tout moyen et sous toute forme. + + 2. le chargement, l'affichage, l'ex\xE9cution, ou le stockage du + Logiciel sur tout support. + + 3. 
la possibilit\xE9 d'en observer, d'en \xE9tudier, ou d'en tester le + fonctionnement afin de d\xE9terminer les id\xE9es et principes qui sont + \xE0 la base de n'importe quel \xE9l\xE9ment de ce Logiciel; et ceci, + lorsque le Licenci\xE9 effectue toute op\xE9ration de chargement, + d'affichage, d'ex\xE9cution, de transmission ou de stockage du + Logiciel qu'il est en droit d'effectuer en vertu du Contrat. + + + 5.2 DROIT D'APPORTER DES CONTRIBUTIONS + +Le droit d'apporter des Contributions comporte le droit de traduire, +d'adapter, d'arranger ou d'apporter toute autre modification au Logiciel +et le droit de reproduire le logiciel en r\xE9sultant. + +Le Licenci\xE9 est autoris\xE9 \xE0 apporter toute Contribution au Logiciel sous +r\xE9serve de mentionner, de fa\xE7on explicite, son nom en tant qu'auteur de +cette Contribution et la date de cr\xE9ation de celle-ci. + + + 5.3 DROIT DE DISTRIBUTION + +Le droit de distribution comporte notamment le droit de diffuser, de +transmettre et de communiquer le Logiciel au public sur tout support et +par tout moyen ainsi que le droit de mettre sur le march\xE9 \xE0 titre +on\xE9reux ou gratuit, un ou des exemplaires du Logiciel par tout proc\xE9d\xE9. + +Le Licenci\xE9 est autoris\xE9 \xE0 distribuer des copies du Logiciel, modifi\xE9 ou +non, \xE0 des tiers dans les conditions ci-apr\xE8s d\xE9taill\xE9es. + + + 5.3.1 DISTRIBUTION DU LOGICIEL SANS MODIFICATION + +Le Licenci\xE9 est autoris\xE9 \xE0 distribuer des copies conformes du Logiciel, +sous forme de Code Source ou de Code Objet, \xE0 condition que cette +distribution respecte les dispositions du Contrat dans leur totalit\xE9 et +soit accompagn\xE9e: + + 1. d'un exemplaire du Contrat, + + 2. 
d'un avertissement relatif \xE0 la restriction de garantie et de + responsabilit\xE9 du Conc\xE9dant telle que pr\xE9vue aux articles 8 + et 9, + +et que, dans le cas o\xF9 seul le Code Objet du Logiciel est redistribu\xE9, +le Licenci\xE9 permette un acc\xE8s effectif au Code Source complet du +Logiciel pendant au moins toute la dur\xE9e de sa distribution du Logiciel, +\xE9tant entendu que le co\xFBt additionnel d'acquisition du Code Source ne +devra pas exc\xE9der le simple co\xFBt de transfert des donn\xE9es. + + + 5.3.2 DISTRIBUTION DU LOGICIEL MODIFIE + +Lorsque le Licenci\xE9 apporte une Contribution au Logiciel, le Logiciel +Modifi\xE9 peut \xEAtre distribu\xE9 sous un contrat de licence autre que le +pr\xE9sent Contrat sous r\xE9serve du respect des dispositions de l'article +5.3.4. + + + 5.3.3 DISTRIBUTION DES MODULES EXTERNES + +Lorsque le Licenci\xE9 a d\xE9velopp\xE9 un Module Externe les conditions du +Contrat ne s'appliquent pas \xE0 ce Module Externe, qui peut \xEAtre distribu\xE9 +sous un contrat de licence diff\xE9rent. + + + 5.3.4 CITATIONS + +Le Licenci\xE9 qui distribue un Logiciel Modifi\xE9 s'engage express\xE9ment: + + 1. \xE0 indiquer dans sa documentation qu'il a \xE9t\xE9 r\xE9alis\xE9 \xE0 partir du + Logiciel r\xE9gi par le Contrat, en reproduisant les mentions de + propri\xE9t\xE9 intellectuelle du Logiciel, + + 2. \xE0 faire en sorte que l'utilisation du Logiciel, ses mentions de + propri\xE9t\xE9 intellectuelle et le fait qu'il est r\xE9gi par le Contrat + soient indiqu\xE9s dans un texte facilement accessible depuis + l'interface du Logiciel Modifi\xE9, + + 3. \xE0 mentionner, sur un site Web librement accessible d\xE9crivant le + Logiciel Modifi\xE9, et pendant au moins toute la dur\xE9e de sa + distribution, qu'il a \xE9t\xE9 r\xE9alis\xE9 \xE0 partir du Logiciel r\xE9gi par le + Contrat, en reproduisant les mentions de propri\xE9t\xE9 intellectuelle + du Logiciel, + + 4. 
lorsqu'il le distribue \xE0 un tiers susceptible de distribuer + lui-m\xEAme un Logiciel Modifi\xE9, sans avoir \xE0 en distribuer le code + source, \xE0 faire ses meilleurs efforts pour que les obligations du + pr\xE9sent article 5.3.4 soient reprises par le dit tiers. + +Lorsque le Logiciel modifi\xE9 ou non est distribu\xE9 avec un Module Externe +qui a \xE9t\xE9 con\xE7u pour l'utiliser, le Licenci\xE9 doit soumettre le dit +Module Externe aux obligations pr\xE9c\xE9dentes. + + + 5.3.5 COMPATIBILITE AVEC LES LICENCES CeCILL et CeCILL-C + +Lorsqu'un Logiciel Modifi\xE9 contient une Contribution soumise au contrat +de licence CeCILL, les stipulations pr\xE9vues \xE0 l'article 5.3.4 sont +facultatives. + +Un Logiciel Modifi\xE9 peut \xEAtre distribu\xE9 sous le contrat de licence +CeCILL-C. Les stipulations pr\xE9vues \xE0 l'article 5.3.4 sont alors +facultatives. + + + Article 6 - PROPRIETE INTELLECTUELLE + + + 6.1 SUR LE LOGICIEL INITIAL + +Le Titulaire est d\xE9tenteur des droits patrimoniaux sur le Logiciel +Initial. Toute utilisation du Logiciel Initial est soumise au respect +des conditions dans lesquelles le Titulaire a choisi de diffuser son +oeuvre et nul autre n'a la facult\xE9 de modifier les conditions de +diffusion de ce Logiciel Initial. + +Le Titulaire s'engage \xE0 ce que le Logiciel Initial reste au moins r\xE9gi +par le Contrat et ce, pour la dur\xE9e vis\xE9e \xE0 l'article 4.2. + + + 6.2 SUR LES CONTRIBUTIONS + +Le Licenci\xE9 qui a d\xE9velopp\xE9 une Contribution est titulaire sur celle-ci +des droits de propri\xE9t\xE9 intellectuelle dans les conditions d\xE9finies par +la l\xE9gislation applicable. + + + 6.3 SUR LES MODULES EXTERNES + +Le Licenci\xE9 qui a d\xE9velopp\xE9 un Module Externe est titulaire sur celui-ci +des droits de propri\xE9t\xE9 intellectuelle dans les conditions d\xE9finies par +la l\xE9gislation applicable et reste libre du choix du contrat r\xE9gissant +sa diffusion. 
+ + + 6.4 DISPOSITIONS COMMUNES + +Le Licenci\xE9 s'engage express\xE9ment: + + 1. \xE0 ne pas supprimer ou modifier de quelque mani\xE8re que ce soit les + mentions de propri\xE9t\xE9 intellectuelle appos\xE9es sur le Logiciel; + + 2. \xE0 reproduire \xE0 l'identique lesdites mentions de propri\xE9t\xE9 + intellectuelle sur les copies du Logiciel modifi\xE9 ou non. + +Le Licenci\xE9 s'engage \xE0 ne pas porter atteinte, directement ou +indirectement, aux droits de propri\xE9t\xE9 intellectuelle du Titulaire et/ou +des Contributeurs sur le Logiciel et \xE0 prendre, le cas \xE9ch\xE9ant, \xE0 +l'\xE9gard de son personnel toutes les mesures n\xE9cessaires pour assurer le +respect des dits droits de propri\xE9t\xE9 intellectuelle du Titulaire et/ou +des Contributeurs. + + + Article 7 - SERVICES ASSOCIES + +7.1 Le Contrat n'oblige en aucun cas le Conc\xE9dant \xE0 la r\xE9alisation de +prestations d'assistance technique ou de maintenance du Logiciel. + +Cependant le Conc\xE9dant reste libre de proposer ce type de services. Les +termes et conditions d'une telle assistance technique et/ou d'une telle +maintenance seront alors d\xE9termin\xE9s dans un acte s\xE9par\xE9. Ces actes de +maintenance et/ou assistance technique n'engageront que la seule +responsabilit\xE9 du Conc\xE9dant qui les propose. + +7.2 De m\xEAme, tout Conc\xE9dant est libre de proposer, sous sa seule +responsabilit\xE9, \xE0 ses licenci\xE9s une garantie, qui n'engagera que lui, +lors de la redistribution du Logiciel et/ou du Logiciel Modifi\xE9 et ce, +dans les conditions qu'il souhaite. Cette garantie et les modalit\xE9s +financi\xE8res de son application feront l'objet d'un acte s\xE9par\xE9 entre le +Conc\xE9dant et le Licenci\xE9. 
+ + + Article 8 - RESPONSABILITE + +8.1 Sous r\xE9serve des dispositions de l'article 8.2, le Licenci\xE9 a la +facult\xE9, sous r\xE9serve de prouver la faute du Conc\xE9dant concern\xE9, de +solliciter la r\xE9paration du pr\xE9judice direct qu'il subirait du fait du +Logiciel et dont il apportera la preuve. + +8.2 La responsabilit\xE9 du Conc\xE9dant est limit\xE9e aux engagements pris en +application du Contrat et ne saurait \xEAtre engag\xE9e en raison notamment: +(i) des dommages dus \xE0 l'inex\xE9cution, totale ou partielle, de ses +obligations par le Licenci\xE9, (ii) des dommages directs ou indirects +d\xE9coulant de l'utilisation ou des performances du Logiciel subis par le +Licenci\xE9 et (iii) plus g\xE9n\xE9ralement d'un quelconque dommage indirect. En +particulier, les Parties conviennent express\xE9ment que tout pr\xE9judice +financier ou commercial (par exemple perte de donn\xE9es, perte de +b\xE9n\xE9fices, perte d'exploitation, perte de client\xE8le ou de commandes, +manque \xE0 gagner, trouble commercial quelconque) ou toute action dirig\xE9e +contre le Licenci\xE9 par un tiers, constitue un dommage indirect et +n'ouvre pas droit \xE0 r\xE9paration par le Conc\xE9dant. + + + Article 9 - GARANTIE + +9.1 Le Licenci\xE9 reconna\xEEt que l'\xE9tat actuel des connaissances +scientifiques et techniques au moment de la mise en circulation du +Logiciel ne permet pas d'en tester et d'en v\xE9rifier toutes les +utilisations ni de d\xE9tecter l'existence d'\xE9ventuels d\xE9fauts. L'attention +du Licenci\xE9 a \xE9t\xE9 attir\xE9e sur ce point sur les risques associ\xE9s au +chargement, \xE0 l'utilisation, la modification et/ou au d\xE9veloppement et \xE0 +la reproduction du Logiciel qui sont r\xE9serv\xE9s \xE0 des utilisateurs avertis. 
+ +Il rel\xE8ve de la responsabilit\xE9 du Licenci\xE9 de contr\xF4ler, par tous +moyens, l'ad\xE9quation du produit \xE0 ses besoins, son bon fonctionnement et +de s'assurer qu'il ne causera pas de dommages aux personnes et aux biens. + +9.2 Le Conc\xE9dant d\xE9clare de bonne foi \xEAtre en droit de conc\xE9der +l'ensemble des droits attach\xE9s au Logiciel (comprenant notamment les +droits vis\xE9s \xE0 l'article 5). + +9.3 Le Licenci\xE9 reconna\xEEt que le Logiciel est fourni "en l'\xE9tat" par le +Conc\xE9dant sans autre garantie, expresse ou tacite, que celle pr\xE9vue \xE0 +l'article 9.2 et notamment sans aucune garantie sur sa valeur commerciale, +son caract\xE8re s\xE9curis\xE9, innovant ou pertinent. + +En particulier, le Conc\xE9dant ne garantit pas que le Logiciel est exempt +d'erreur, qu'il fonctionnera sans interruption, qu'il sera compatible +avec l'\xE9quipement du Licenci\xE9 et sa configuration logicielle ni qu'il +remplira les besoins du Licenci\xE9. + +9.4 Le Conc\xE9dant ne garantit pas, de mani\xE8re expresse ou tacite, que le +Logiciel ne porte pas atteinte \xE0 un quelconque droit de propri\xE9t\xE9 +intellectuelle d'un tiers portant sur un brevet, un logiciel ou sur tout +autre droit de propri\xE9t\xE9. Ainsi, le Conc\xE9dant exclut toute garantie au +profit du Licenci\xE9 contre les actions en contrefa\xE7on qui pourraient \xEAtre +diligent\xE9es au titre de l'utilisation, de la modification, et de la +redistribution du Logiciel. N\xE9anmoins, si de telles actions sont +exerc\xE9es contre le Licenci\xE9, le Conc\xE9dant lui apportera son aide +technique et juridique pour sa d\xE9fense. Cette aide technique et +juridique est d\xE9termin\xE9e au cas par cas entre le Conc\xE9dant concern\xE9 et +le Licenci\xE9 dans le cadre d'un protocole d'accord. Le Conc\xE9dant d\xE9gage +toute responsabilit\xE9 quant \xE0 l'utilisation de la d\xE9nomination du +Logiciel par le Licenci\xE9. 
Aucune garantie n'est apport\xE9e quant \xE0 +l'existence de droits ant\xE9rieurs sur le nom du Logiciel et sur +l'existence d'une marque. + + + Article 10 - RESILIATION + +10.1 En cas de manquement par le Licenci\xE9 aux obligations mises \xE0 sa +charge par le Contrat, le Conc\xE9dant pourra r\xE9silier de plein droit le +Contrat trente (30) jours apr\xE8s notification adress\xE9e au Licenci\xE9 et +rest\xE9e sans effet. + +10.2 Le Licenci\xE9 dont le Contrat est r\xE9sili\xE9 n'est plus autoris\xE9 \xE0 +utiliser, modifier ou distribuer le Logiciel. Cependant, toutes les +licences qu'il aura conc\xE9d\xE9es ant\xE9rieurement \xE0 la r\xE9siliation du Contrat +resteront valides sous r\xE9serve qu'elles aient \xE9t\xE9 effectu\xE9es en +conformit\xE9 avec le Contrat. + + + Article 11 - DISPOSITIONS DIVERSES + + + 11.1 CAUSE EXTERIEURE + +Aucune des Parties ne sera responsable d'un retard ou d'une d\xE9faillance +d'ex\xE9cution du Contrat qui serait d\xFB \xE0 un cas de force majeure, un cas +fortuit ou une cause ext\xE9rieure, telle que, notamment, le mauvais +fonctionnement ou les interruptions du r\xE9seau \xE9lectrique ou de +t\xE9l\xE9communication, la paralysie du r\xE9seau li\xE9e \xE0 une attaque +informatique, l'intervention des autorit\xE9s gouvernementales, les +catastrophes naturelles, les d\xE9g\xE2ts des eaux, les tremblements de terre, +le feu, les explosions, les gr\xE8ves et les conflits sociaux, l'\xE9tat de +guerre... + +11.2 Le fait, par l'une ou l'autre des Parties, d'omettre en une ou +plusieurs occasions de se pr\xE9valoir d'une ou plusieurs dispositions du +Contrat, ne pourra en aucun cas impliquer renonciation par la Partie +int\xE9ress\xE9e \xE0 s'en pr\xE9valoir ult\xE9rieurement. + +11.3 Le Contrat annule et remplace toute convention ant\xE9rieure, \xE9crite +ou orale, entre les Parties sur le m\xEAme objet et constitue l'accord +entier entre les Parties sur cet objet. 
Aucune addition ou modification +aux termes du Contrat n'aura d'effet \xE0 l'\xE9gard des Parties \xE0 moins +d'\xEAtre faite par \xE9crit et sign\xE9e par leurs repr\xE9sentants d\xFBment habilit\xE9s. + +11.4 Dans l'hypoth\xE8se o\xF9 une ou plusieurs des dispositions du Contrat +s'av\xE8rerait contraire \xE0 une loi ou \xE0 un texte applicable, existants ou +futurs, cette loi ou ce texte pr\xE9vaudrait, et les Parties feraient les +amendements n\xE9cessaires pour se conformer \xE0 cette loi ou \xE0 ce texte. +Toutes les autres dispositions resteront en vigueur. De m\xEAme, la +nullit\xE9, pour quelque raison que ce soit, d'une des dispositions du +Contrat ne saurait entra\xEEner la nullit\xE9 de l'ensemble du Contrat. + + + 11.5 LANGUE + +Le Contrat est r\xE9dig\xE9 en langue fran\xE7aise et en langue anglaise, ces +deux versions faisant \xE9galement foi. + + + Article 12 - NOUVELLES VERSIONS DU CONTRAT + +12.1 Toute personne est autoris\xE9e \xE0 copier et distribuer des copies de +ce Contrat. + +12.2 Afin d'en pr\xE9server la coh\xE9rence, le texte du Contrat est prot\xE9g\xE9 +et ne peut \xEAtre modifi\xE9 que par les auteurs de la licence, lesquels se +r\xE9servent le droit de publier p\xE9riodiquement des mises \xE0 jour ou de +nouvelles versions du Contrat, qui poss\xE9deront chacune un num\xE9ro +distinct. Ces versions ult\xE9rieures seront susceptibles de prendre en +compte de nouvelles probl\xE9matiques rencontr\xE9es par les logiciels libres. + +12.3 Tout Logiciel diffus\xE9 sous une version donn\xE9e du Contrat ne pourra +faire l'objet d'une diffusion ult\xE9rieure que sous la m\xEAme version du +Contrat ou une version post\xE9rieure. + + + Article 13 - LOI APPLICABLE ET COMPETENCE TERRITORIALE + +13.1 Le Contrat est r\xE9gi par la loi fran\xE7aise. Les Parties conviennent +de tenter de r\xE9gler \xE0 l'amiable les diff\xE9rends ou litiges qui +viendraient \xE0 se produire par suite ou \xE0 l'occasion du Contrat. 
+ +13.2 A d\xE9faut d'accord amiable dans un d\xE9lai de deux (2) mois \xE0 compter +de leur survenance et sauf situation relevant d'une proc\xE9dure d'urgence, +les diff\xE9rends ou litiges seront port\xE9s par la Partie la plus diligente +devant les Tribunaux comp\xE9tents de Paris. + + +Version 1.0 du 2006-09-05. Added: trunk/src/pyphant/pyphant/quantities/NumberDict.py =================================================================== --- trunk/src/pyphant/pyphant/quantities/NumberDict.py (rev 0) +++ trunk/src/pyphant/pyphant/quantities/NumberDict.py 2008-11-11 11:17:02 UTC (rev 573) @@ -0,0 +1,67 @@ +# Dictionary containing numbers +# +# These objects are meant to be used like arrays with generalized +# indices. Non-existent elements default to zero. Global operations +# are addition, subtraction, and multiplication/division by a scalar. +# +# Written by Konrad Hinsen <hi...@cn...> +# last revision: 2006-10-16 +# + +""" +Dictionary storing numerical values +""" + +class NumberDict(dict): + + """ + Dictionary storing numerical values + + Constructor: NumberDict() + + An instance of this class acts like an array of number with + generalized (non-integer) indices. A value of zero is assumed + for undefined entries. NumberDict instances support addition, + and subtraction with other NumberDict instances, and multiplication + and division by scalars. 
+ """ + + def __getitem__(self, item): + try: + return dict.__getitem__(self, item) + except KeyError: + return 0 + + def __coerce__(self, other): + if type(other) == type({}): + other = NumberDict(other) + return self, other + + def __add__(self, other): + sum_dict = NumberDict() + for key in self.keys(): + sum_dict[key] = self[key] + for key in other.keys(): + sum_dict[key] = sum_dict[key] + other[key] + return sum_dict + + def __sub__(self, other): + sum_dict = NumberDict() + for key in self.keys(): + sum_dict[key] = self[key] + for key in other.keys(): + sum_dict[key] = sum_dict[key] - other[key] + return sum_dict + + def __mul__(self, other): + new = NumberDict() + for key in self.keys(): + new[key] = other*self[key] + return new + __rmul__ = __mul__ + + def __div__(self, other): + new = NumberDict() + for key in self.keys(): + new[key] = self[key]/other + return new Added: trunk/src/pyphant/pyphant/quantities/PhysicalQuantities.py =================================================================== --- trunk/src/pyphant/pyphant/quantities/PhysicalQuantities.py (rev 0) +++ trunk/src/pyphant/pyphant/quantities/PhysicalQuantities.py 2008-11-11 11:17:02 UTC (rev 573) @@ -0,0 +1,843 @@ +# Physical quantities with units +# +# Written by Konrad Hinsen <hi...@cn...> +# with contributions from Greg Ward +# merged into Pyphant by Andreas W. Liehr +# last revision: 2008-11-10 +# + +""" +Physical quantities with units. + +This module provides a data type that represents a physical +quantity together with its unit. It is possible to add and +subtract these quantities if the units are compatible, and +a quantity can be converted to another compatible unit. +Multiplication, subtraction, and raising to integer powers +is allowed without restriction, and the result will have +the correct unit. A quantity can be raised to a non-integer +power only if the result can be represented by integer powers +of the base units. 
+ +The values of physical constants are taken from the 1986 +recommended values from CODATA. Other conversion factors +(e.g. for British units) come from various sources. I can't +guarantee for the correctness of all entries in the unit +table, so use this at your own risk. +""" + +from pyphant.quantities.NumberDict import NumberDict + +import numpy.oldnumeric +def int_sum(a, axis=0): + return numpy.oldnumeric.add.reduce(a, axis) +def zeros_st(shape, other): + return numpy.oldnumeric.zeros(shape, dtype=other.dtype) +from numpy import ndarray as array_type + + +import re, string + + +# Class definitions + +class PhysicalQuantity: + + """ + Physical quantity with units + + PhysicalQuantity instances allow addition, subtraction, + multiplication, and division with each other as well as + multiplication, division, and exponentiation with numbers. + Addition and subtraction check that the units of the two operands + are compatible and return the result in the units of the first + operand. A limited set of mathematical functions (from module + Numeric) is applicable as well: + + - sqrt: equivalent to exponentiation with 0.5. + + - sin, cos, tan: applicable only to objects whose unit is + compatible with 'rad'. + + See the documentation of the PhysicalQuantities module for a list + of the available units. 
+ + Here is an example on usage: + + >>> from PhysicalQuantities import PhysicalQuantity as p # short hand + >>> distance1 = p('10 m') + >>> distance2 = p('10 km') + >>> total = distance1 + distance2 + >>> total + PhysicalQuantity(10010.0,'m') + >>> total.convertToUnit('km') + >>> total.getValue() + 10.01 + >>> total.getUnitName() + 'km' + >>> total = total.inBaseUnits() + >>> total + PhysicalQuantity(10010.0,'m') + >>> + >>> t = p(314159., 's') + >>> # convert to days, hours, minutes, and second: + >>> t2 = t.inUnitsOf('d','h','min','s') + >>> t2_print = ' '.join([str(i) for i in t2]) + >>> t2_print + '3.0 d 15.0 h 15.0 min 59.0 s' + >>> + >>> e = p('2.7 Hartree*Nav') + >>> e.convertToUnit('kcal/mol') + >>> e + PhysicalQuantity(1694.2757596034764,'kcal/mol') + >>> e = e.inBaseUnits() + >>> str(e) + '7088849.77818 kg*m**2/s**2/mol' + >>> + >>> freeze = p('0 degC') + >>> freeze = freeze.inUnitsOf ('degF') + >>> str(freeze) + '32.0 degF' + >>> + """ + + def __init__(self, *args): + """ + There are two constructor calling patterns: + + 1. PhysicalQuantity(value, unit), where value is any number + and unit is a string defining the unit + + 2. PhysicalQuantity(value_with_unit), where value_with_unit + is a string that contains both the value and the unit, + i.e. '1.5 m/s'. This form is provided for more convenient + interactive use. 
+ + @param args: either (value, unit) or (value_with_unit,) + @type args: (number, C{str}) or (C{str},) + """ + if len(args) == 2: + self.value = args[0] + self.unit = _findUnit(args[1]) + else: + s = string.strip(args[0]) + match = PhysicalQuantity._number.match(s) + if match is None: + raise TypeError('No number found') + self.value = string.atof(match.group(0)) + self.unit = _findUnit(s[len(match.group(0)):]) + + _number = re.compile('[+-]?[0-9]+(\\.[0-9]*)?([eE][+-]?[0-9]+)?') + + def __str__(self): + return str(self.value) + ' ' + self.unit.name() + + def __repr__(self): + return (self.__class__.__name__ + '(' + `self.value` + ',' + + `self.unit.name()` + ')') + + def _sum(self, other, sign1, sign2): + if not isPhysicalQuantity(other): + raise TypeError('Incompatible types') + new_value = sign1*self.value + \ + sign2*other.value*other.unit.conversionFactorTo(self.unit) + return self.__class__(new_value, self.unit) + + def __add__(self, other): + return self._sum(other, 1, 1) + + __radd__ = __add__ + + def __sub__(self, other): + return self._sum(other, 1, -1) + + def __rsub__(self, other): + return self._sum(other, -1, 1) + + def __cmp__(self, other): + diff = self._sum(other, 1, -1) + return cmp(diff.value, 0) + + def __mul__(self, other): + if not isPhysicalQuantity(other): + return self.__class__(self.value*other, self.unit) + value = self.value*other.value + unit = self.unit*other.unit + if unit.isDimensionless(): + return value*unit.factor + else: + return self.__class__(value, unit) + + __rmul__ = __mul__ + + def __div__(self, other): + if not isPhysicalQuantity(other): + return self.__class__(self.value/other, self.unit) + value = self.value/other.value + unit = self.unit/other.unit + if unit.isDimensionless(): + return value*unit.factor + else: + return self.__class__(value, unit) + + def __rdiv__(self, other): + if not isPhysicalQuantity(other): + return self.__class__(other/self.value, pow(self.unit, -1)) + value = other.value/self.value + unit = 
other.unit/self.unit + if unit.isDimensionless(): + return value*unit.factor + else: + return self.__class__(value, unit) + + def __pow__(self, other): + if isPhysicalQuantity(other): + raise TypeError('Exponents must be dimensionless') + return self.__class__(pow(self.value, other), pow(self.unit, other)) + + def __rpow__(self, other): + raise TypeError('Exponents must be dimensionless') + + def __abs__(self): + return self.__class__(abs(self.value), self.unit) + + def __pos__(self): + return self + + def __neg__(self): + return self.__class__(-self.value, self.unit) + + def __nonzero__(self): + return self.value != 0 + + def convertToUnit(self, unit): + """ + Change the unit and adjust the value such that + the combination is equivalent to the original one. The new unit + must be compatible with the previous unit of the object. + + @param unit: a unit + @type unit: C{str} + @raise TypeError: if the unit string is not a know unit or a + unit incompatible with the current one + """ + unit = _findUnit(unit) + self.value = _convertValue (self.value, self.unit, unit) + self.unit = unit + + def inUnitsOf(self, *units): + """ + Express the quantity in different units. If one unit is + specified, a new PhysicalQuantity object is returned that + expresses the quantity in that unit. If several units + are specified, the return value is a tuple of + PhysicalObject instances with with one element per unit such + that the sum of all quantities in the tuple equals the the + original quantity and all the values except for the last one + are integers. This is used to convert to irregular unit + systems like hour/minute/second. 
+ + @param units: one or several units + @type units: C{str} or sequence of C{str} + @returns: one or more physical quantities + @rtype: L{PhysicalQuantity} or C{tuple} of L{PhysicalQuantity} + @raises TypeError: if any of the specified units are not compatible + with the original unit + """ + units = map(_findUnit, units) + if len(units) == 1: + unit = units[0] + value = _convertValue (self.value, self.unit, unit) + return self.__class__(value, unit) + else: + units.sort() + result = [] + value = self.value + unit = self.unit + for i in range(len(units)-1,-1,-1): + value = value*unit.conversionFactorTo(units[i]) + if i == 0: + rounded = value + else: + rounded = _round(value) + result.append(self.__class__(rounded, units[i])) + value = value - rounded + unit = units[i] + return tuple(result) + + # Contributed by Berthold Hoellmann + def inBaseUnits(self): + """ + @returns: the same quantity converted to base units, + i.e. SI units in most cases + @rtype: L{PhysicalQuantity} + """ + new_value = self.value * self.unit.factor + num = '' + denom = '' + for i in xrange(9): + unit = _base_names[i] + power = self.unit.powers[i] + if power < 0: + denom = denom + '/' + unit + if power < -1: + denom = denom + '**' + str(-power) + elif power > 0: + num = num + '*' + unit + if power > 1: + num = num + '**' + str(power) + if len(num) == 0: + num = '1' + else: + num = num[1:] + return self.__class__(new_value, num + denom) + + def isCompatible (self, unit): + """ + @param unit: a unit + @type unit: C{str} + @returns: C{True} if the specified unit is compatible with the + one of the quantity + @rtype: C{bool} + """ + unit = _findUnit (unit) + return self.unit.isCompatible (unit) + + def getValue(self): + """Return value (float) of physical quantity (no unit).""" + return self.value + + def getUnitName(self): + """Return unit (string) of physical quantity.""" + return self.unit.name() + + def sqrt(self): + return pow(self, 0.5) + + def sin(self): + if self.unit.isAngle(): + 
return numpy.oldnumeric.sin(self.value * \ + self.unit.conversionFactorTo(_unit_table['rad'])) + else: + raise TypeError('Argument of sin must be an angle') + + def cos(self): + if self.unit.isAngle(): + return numpy.oldnumeric.cos(self.value * \ + self.unit.conversionFactorTo(_unit_table['rad'])) + else: + raise TypeError('Argument of cos must be an angle') + + def tan(self): + if self.unit.isAngle(): + return numpy.oldnumeric.tan(self.value * \ + self.unit.conversionFactorTo(_unit_table['rad'])) + else: + raise TypeError('Argument of tan must be an angle') + + +class PhysicalUnit: + + """ + Physical unit + + A physical unit is defined by a name (possibly composite), a scaling + factor, and the exponentials of each of the SI base units that enter into + it. Units can be multiplied, divided, and raised to integer powers. + """ + + def __init__(self, names, factor, powers, offset=0): + """ + @param names: a dictionary mapping each name component to its + associated integer power (e.g. C{{'m': 1, 's': -1}}) + for M{m/s}). As a shorthand, a string may be passed + which is assigned an implicit power 1. 
+ @type names: C{dict} or C{str} + @param factor: a scaling factor + @type factor: C{float} + @param powers: the integer powers for each of the nine base units + @type powers: C{list} of C{int} + @param offset: an additive offset to the base unit (used only for + temperatures) + @type offset: C{float} + """ + if type(names) == type(''): + self.names = NumberDict() + self.names[names] = 1 + else: + self.names = names + self.factor = factor + self.offset = offset + self.powers = powers + + def __repr__(self): + return '<PhysicalUnit ' + self.name() + '>' + + __str__ = __repr__ + + def __cmp__(self, other): + if self.powers != other.powers: + raise TypeError('Incompatible units') + return cmp(self.factor, other.factor) + + def __mul__(self, other): + if self.offset != 0 or (isPhysicalUnit (other) and other.offset != 0): + raise TypeError("cannot multiply units with non-zero offset") + if isPhysicalUnit(other): + return PhysicalUnit(self.names+other.names, + self.factor*other.factor, + map(lambda a,b: a+b, + self.powers, other.powers)) + else: + return PhysicalUnit(self.names+{str(other): 1}, + self.factor*other, + self.powers, + self.offset * other) + + __rmul__ = __mul__ + + def __div__(self, other): + if self.offset != 0 or (isPhysicalUnit (other) and other.offset != 0): + raise TypeError("cannot divide units with non-zero offset") + if isPhysicalUnit(other): + return PhysicalUnit(self.names-other.names, + self.factor/other.factor, + map(lambda a,b: a-b, + self.powers, other.powers)) + else: + return PhysicalUnit(self.names+{str(other): -1}, + self.factor/other, self.powers) + + def __rdiv__(self, other): + if self.offset != 0 or (isPhysicalUnit (other) and other.offset != 0): + raise TypeError("cannot divide units with non-zero offset") + if isPhysicalUnit(other): + return PhysicalUnit(other.names-self.names, + other.factor/self.factor, + map(lambda a,b: a-b, + other.powers, self.powers)) + else: + return PhysicalUnit({str(other): 1}-self.names, + 
other/self.factor, + map(lambda x: -x, self.powers)) + + def __pow__(self, other): + if self.offset != 0: + raise TypeError("cannot exponentiate units with non-zero offset") + if isinstance(other, int): + return PhysicalUnit(other*self.names, pow(self.factor, other), + map(lambda x,p=other: x*p, self.powers)) + if isinstance(other, float): + inv_exp = 1./other + rounded = int(numpy.oldnumeric.floor(inv_exp+0.5)) + if abs(inv_exp-rounded) < 1.e-10: + if reduce(lambda a, b: a and b, + map(lambda x, e=rounded: x%e == 0, self.powers)): + f = pow(self.factor, other) + p = map(lambda x,p=rounded: x/p, self.powers) + if reduce(lambda a, b: a and b, + map(lambda x, e=rounded: x%e == 0, + self.names.values())): + names = self.names/rounded + else: + names = NumberDict() + if f != 1.: + names[str(f)] = 1 + for i in range(len(p)): + names[_base_names[i]] = p[i] + return PhysicalUnit(names, f, p) + else: + raise TypeError('Illegal exponent') + raise TypeError('Only integer and inverse integer exponents allowed') + + def conversionFactorTo(self, other): + """ + @param other: another unit + @type other: L{PhysicalUnit} + @returns: the conversion factor from this unit to another unit + @rtype: C{float} + @raises TypeError: if the units are not compatible + """ + if self.powers != other.powers: + raise TypeError('Incompatible units') + if self.offset != other.offset and self.factor != other.factor: + raise TypeError(('Unit conversion (%s to %s) cannot be expressed ' + + 'as a simple multiplicative factor') % \ + (self.name(), other.name())) + return self.factor/other.factor + + def conversionTupleTo(self, other): # added 1998/09/29 GPW + """ + @param other: another unit + @type other: L{PhysicalUnit} + @returns: the conversion factor and offset from this unit to + another unit + @rtype: (C{float}, C{float}) + @raises TypeError: if the units are not compatible + """ + if self.powers != other.powers: + raise TypeError('Incompatible units') + + # let (s1,d1) be the conversion tuple 
from 'self' to base units + # (ie. (x+d1)*s1 converts a value x from 'self' to base units, + # and (x/s1)-d1 converts x from base to 'self' units) + # and (s2,d2) be the conversion tuple from 'other' to base units + # then we want to compute the conversion tuple (S,D) from + # 'self' to 'other' such that (x+D)*S converts x from 'self' + # units to 'other' units + # the formula to convert x from 'self' to 'other' units via the + # base units is (by definition of the conversion tuples): + # ( ((x+d1)*s1) / s2 ) - d2 + # = ( (x+d1) * s1/s2) - d2 + # = ( (x+d1) * s1/s2 ) - (d2*s2/s1) * s1/s2 + # = ( (x+d1) - (d1*s2/s1) ) * s1/s2 + # = (x + d1 - d2*s2/s1) * s1/s2 + # thus, D = d1 - d2*s2/s1 and S = s1/s2 + factor = self.factor / other.factor + offset = self.offset - (other.offset * other.factor / self.factor) + return (factor, offset) + + def isCompatible (self, other): # added 1998/10/01 GPW + """ + @param other: another unit + @type other: L{PhysicalUnit} + @returns: C{True} if the units are compatible, i.e. 
if the powers of + the base units are the same + @rtype: C{bool} + """ + return self.powers == other.powers + + def isDimensionless(self): + return not reduce(lambda a,b: a or b, self.powers) + + def isAngle(self): + return self.powers[7] == 1 and \ + reduce(lambda a,b: a + b, self.powers) == 1 + + def setName(self, name): + self.names = NumberDict() + self.names[name] = 1 + + def name(self): + num = '' + denom = '' + for unit in self.names.keys(): + power = self.names[unit] + if power < 0: + denom = denom + '/' + unit + if power < -1: + denom = denom + '**' + str(-power) + elif power > 0: + num = num + '*' + unit + if power > 1: + num = num + '**' + str(power) + if len(num) == 0: + num = '1' + else: + num = num[1:] + return num + denom + + +# Type checks + +def isPhysicalUnit(x): + """ + @param x: an object + @type x: any + @returns: C{True} if x is a L{PhysicalUnit} + @rtype: C{bool} + """ + return hasattr(x, 'factor') and hasattr(x, 'powers') + +def isPhysicalQuantity(x): + """ + @param x: an object + @type x: any + @returns: C{True} if x is a L{PhysicalQuantity} + @rtype: C{bool} + """ + return hasattr(x, 'value') and hasattr(x, 'unit') + + +# Helper functions + +def _findUnit(unit): + if type(unit) == type(''): + name = string.strip(unit) + unit = eval(name, _unit_table) + for cruft in ['__builtins__', '__args__']: + try: del _unit_table[cruft] + except: pass + + if not isPhysicalUnit(unit): + raise TypeError(str(unit) + ' is not a unit') + return unit + +def _round(x): + if numpy.oldnumeric.greater(x, 0.): + return numpy.oldnumeric.floor(x) + else: + return numpy.oldnumeric.ceil(x) + + +def _convertValue (value, src_unit, target_unit): + (factor, offset) = src_unit.conversionTupleTo(target_unit) + return (value + offset) * factor + + +# SI unit definitions + +_base_names = ['m', 'kg', 's', 'A', 'K', 'mol', 'cd', 'rad', 'sr'] + +_base_units = [('m', PhysicalUnit('m', 1., [1,0,0,0,0,0,0,0,0])), + ('g', PhysicalUnit('g', 0.001, [0,1,0,0,0,0,0,0,0])), + ('s', 
PhysicalUnit('s', 1., [0,0,1,0,0,0,0,0,0])), + ('A', PhysicalUnit('A', 1., [0,0,0,1,0,0,0,0,0])), + ('K', PhysicalUnit('K', 1., [0,0,0,0,1,0,0,0,0])), + ('mol', PhysicalUnit('mol', 1., [0,0,0,0,0,1,0,0,0])), + ('cd', PhysicalUnit('cd', 1., [0,0,0,0,0,0,1,0,0])), + ('rad', PhysicalUnit('rad', 1., [0,0,0,0,0,0,0,1,0])), + ('sr', PhysicalUnit('sr', 1., [0,0,0,0,0,0,0,0,1])), + ] + +_prefixes = [('Y', 1.e24), + ('Z', 1.e21), + ('E', 1.e18), + ('P', 1.e15), + ('T', 1.e12), + ('G', 1.e9), + ('M', 1.e6), + ('k', 1.e3), + ('h', 1.e2), + ('da', 1.e1), + ('d', 1.e-1), + ('c', 1.e-2), + ('m', 1.e-3), + ('mu', 1.e-6), + ('n', 1.e-9), + ('p', 1.e-12), + ('f', 1.e-15), + ('a', 1.e-18), + ('z', 1.e-21), + ('y', 1.e-24), + ] + +_unit_table = {} + +for unit in _base_units: + _unit_table[unit[0]] = unit[1] + +_help = [] + +def _addUnit(name, unit, comment=''): + if _unit_table.has_key(name): + raise KeyError, 'Unit ' + name + ' already defined' + if comment: + _help.append((name, comment, unit)) + if type(unit) == type(''): + unit = eval(unit, _unit_table) + for cruft in ['__builtins__', '__args__']: + try: del _unit_table[cruft] + except: pass + unit.setName(name) + _unit_table[name] = unit + +def _addPrefixed(unit): + _help.append('Prefixed units for %s:' % unit) + _prefixed_names = [] + for prefix in _prefixes: + name = prefix[0] + unit + _addUnit(name, prefix[1]*_unit_table[unit]) + _prefixed_names.append(name) + _help.append(', '.join(_prefixed_names)) + + +# SI derived units; these automatically get prefixes +_help.append('SI derived units; these automatically get prefixes:\n' + \ + ', '.join([prefix + ' (%.0E)' % value for prefix, value in _prefixes]) + \ + '\n') + + +_unit_table['kg'] = PhysicalUnit('kg', 1., [0,1,0,0,0,0,0,0,0]) + +_addUnit('Hz', '1/s', 'Hertz') +_addUnit('N', 'm*kg/s**2', 'Newton') +_addUnit('Pa', 'N/m**2', 'Pascal') +_addUnit('J', 'N*m', 'Joule') +_addUnit('W', 'J/s', 'Watt') +_addUnit('C', 's*A', 'Coulomb') +_addUnit('V', 'W/A', 'Volt') +_addUnit('F', 
'C/V', 'Farad') +_addUnit('ohm', 'V/A', 'Ohm') +_addUnit('S', 'A/V', 'Siemens') +_addUnit('Wb', 'V*s', 'Weber') +_addUnit('T', 'Wb/m**2', 'Tesla') +_addUnit('H', 'Wb/A', 'Henry') +_addUnit('lm', 'cd*sr', 'Lumen') +_addUnit('lx', 'lm/m**2', 'Lux') +_addUnit('Bq', '1/s', 'Becquerel') +_addUnit('Gy', 'J/kg', 'Gray') +_addUnit('Sv', 'J/kg', 'Sievert') + +del _unit_table['kg'] + +for unit in _unit_table.keys(): + _addPrefixed(unit) + +# Fundamental constants +_help.append('Fundamental constants:') + +_unit_table['pi'] = numpy.oldnumeric.pi +_addUnit('c', '299792458.*m/s', 'speed of light') +_addUnit('mu0', '4.e-7*pi*N/A**2', 'permeability of vacuum') +_addUnit('eps0', '1/mu0/c**2', 'permittivity of vacuum') +_addUnit('Grav', '6.67259e-11*m**3/kg/s**2', 'gravitational constant') +_addUnit('hplanck', '6.6260755e-34*J*s', 'Planck constant') +_addUnit('hbar', 'hplanck/(2*pi)', 'Planck constant / 2pi') +_addUnit('e', '1.60217733e-19*C', 'elementary charge') +_addUnit('me', '9.1093897e-31*kg', 'electron mass') +_addUnit('mp', '1.6726231e-27*kg', 'proton mass') +_addUnit('Nav', '6.0221367e23/mol', 'Avogadro number') +_addUnit('k', '1.380658e-23*J/K', 'Boltzmann constant') + +# Time units +_help.append('Time units:') + +_addUnit('min', '60*s', 'minute') +_addUnit('h', '60*min', 'hour') +_addUnit('d', '24*h', 'day') +_addUnit('wk', '7*d', 'week') +_addUnit('yr', '365.25*d', 'year') + +# Length units +_help.append('Length units:') + +_addUnit('inch', '2.54*cm', 'inch') +_addUnit('ft', '12*inch', 'foot') +_addUnit('yd', '3*ft', 'yard') +_addUnit('mi', '5280.*ft', '(British) mile') +_addUnit('nmi', '1852.*m', 'Nautical mile') +_addUnit('Ang', '1.e-10*m', 'Angstrom') +_addUnit('lyr', 'c*yr', 'light year') +_addUnit('Bohr', '4*pi*eps0*hbar**2/me/e**2', 'Bohr radius') + +# Area units +_help.append('Area units:') + +_addUnit('ha', '10000*m**2', 'hectare') +_addUnit('acres', 'mi**2/640', 'acre') +_addUnit('b', '1.e-28*m', 'barn') + +# Volume units +_help.append('Volume units:') + 
+_addUnit('l', 'dm**3', 'liter') +_addUnit('dl', '0.1*l', 'deci liter') +_addUnit('cl', '0.01*l', 'centi liter') +_addUnit('ml', '0.001*l', 'milli liter') +_addUnit('tsp', '4.92892159375*ml', 'teaspoon') +_addUnit('tbsp', '3*tsp', 'tablespoon') +_addUnit('floz', '2*tbsp', 'fluid ounce') +_addUnit('cup', '8*floz', 'cup') +_addUnit('pt', '16*floz', 'pint') +_addUnit('qt', '2*pt', 'quart') +_addUnit('galUS', '4*qt', 'US gallon') +_addUnit('galUK', '4.54609*l', 'British gallon') + +# Mass units +_help.append('Mass units:') + +_addUnit('amu', '1.6605402e-27*kg', 'atomic mass units') +_addUnit('oz', '28.349523125*g', 'ounce') +_addUnit('lb', '16*oz', 'pound') +_addUnit('ton', '2000*lb', 'ton') + +# Force units +_help.append('Force units:') + +_addUnit('dyn', '1.e-5*N', 'dyne (cgs unit)') + +# Energy units +_help.append('Energy units:') + +_addUnit('erg', '1.e-7*J', 'erg (cgs unit)') +_addUnit('eV', 'e*V', 'electron volt') +_addUnit('Hartree', 'me*e**4/16/pi**2/eps0**2/hbar**2', 'Wavenumbers/inverse cm') +_addUnit('Ken', 'k*K', 'Kelvin as energy unit') +_addUnit('cal', '4.184*J', 'thermochemical calorie') +_addUnit('kcal', '1000*cal', 'thermochemical kilocalorie') +_addUnit('cali', '4.1868*J', 'international calorie') +_addUnit('kcali', '1000*cali', 'international kilocalorie') +_addUnit('Btu', '1055.05585262*J', 'British thermal unit') + +_addPrefixed('eV') + +# Power units +_help.append('Power units:') + +_addUnit('hp', '745.7*W', 'horsepower') + +# Pressure units +_help.append('Pressure units:') + +_addUnit('bar', '1.e5*Pa', 'bar (cgs unit)') +_addUnit('atm', '101325.*Pa', 'standard atmosphere') +_addUnit('torr', 'atm/760', 'torr = mm of mercury') +_addUnit('psi', '6894.75729317*Pa', 'pounds per square inch') + +# Angle units +_help.append('Angle units:') + +_addUnit('deg', 'pi*rad/180', 'degrees') + +_help.append('Temperature units:') +# Temperature units -- can't use the 'eval' trick that _addUnit provides +# for degC and degF because you can't add units +kelvin = 
_findUnit ('K') +_addUnit ('degR', '(5./9.)*K', 'degrees Rankine') +_addUnit ('degC', PhysicalUnit (None, 1.0, kelvin.powers, 273.15), + 'degrees Celcius') +_addUnit ('degF', PhysicalUnit (None, 5./9., kelvin.powers, 459.67), + 'degree Fahrenheit') +del kelvin + + +def description(): + """Return a string describing all available units.""" + s = '' # collector for description text + for entry in _help: + if isinstance(entry, basestring): + # headline for new section + s += '\n' + entry + '\n' + elif isinstance(entry, tuple): + name, comment, unit = entry + s += '%-8s %-26s %s\n' % (name, comment, unit) + else: + # impossible + raise TypeError, 'wrong construction of _help list' + return s + +# add the description of the units to the module's doc string: +__doc__ += '\n' + description() + +# Some demonstration code. Run with "python -i PhysicalQuantities.py" +# to have this available. + +if __name__ == '__main__': + +# from Scientific.N import * + l = PhysicalQuantity(10., 'm... [truncated message content] |
From: <zk...@us...> - 2009-01-20 15:23:36
|
Revision: 601 http://pyphant.svn.sourceforge.net/pyphant/?rev=601&view=rev Author: zklaus Date: 2009-01-20 15:23:29 +0000 (Tue, 20 Jan 2009) Log Message: ----------- Refactured quantities parsers of FMFLoader into new module of the Pyphant core. Modified Paths: -------------- trunk/src/workers/fmfile/fmfile/FMFLoader.py Added Paths: ----------- trunk/src/pyphant/pyphant/quantities/ParseQuantities.py Added: trunk/src/pyphant/pyphant/quantities/ParseQuantities.py =================================================================== --- trunk/src/pyphant/pyphant/quantities/ParseQuantities.py (rev 0) +++ trunk/src/pyphant/pyphant/quantities/ParseQuantities.py 2009-01-20 15:23:29 UTC (rev 601) @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2008, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +u""" +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + +from PhysicalQuantities import PhysicalQuantity + +def str2unit(unit): + if unit.startswith('.'): + unit = '0'+unit + elif unit.endswith('%'): + unit = float(unit[:-1])/100.0 + elif unit.endswith('a.u.'): + try: + unit = float(unit[:-4]) + except: + unit = 1.0 + elif not (unit[0].isdigit() or unit[0]=='-'): + unit = '1'+unit + try: + unit = unit.replace('^', '**') + unit = PhysicalQuantity(unit.encode('utf-8')) + except: + unit = float(unit) + return unit + +def parseQuantity(value): + import re + pm = re.compile(ur"(?:\\pm|\+-|\+/-)") + try: + value, error = [s.strip() for s in pm.split(value)] + except: + error = None + if value.startswith('('): + value = float(value[1:]) + error, unit = [s.strip() for s in error.split(')')] + unit = str2unit(unit) + value *= unit + else: + value = str2unit(value) + if error != None: + if error.endswith('%'): + error = value*float(error[:-1])/100.0 + else: + try: + error = float(error)*unit + except: + error = str2unit(error) + return value, error + +def parseVariable(oldVal): + shortname, value = tuple([s.strip() for s in oldVal.split('=')]) + value, error = parseQuantity(value) + return (shortname, value, error) Modified: trunk/src/workers/fmfile/fmfile/FMFLoader.py =================================================================== --- trunk/src/workers/fmfile/fmfile/FMFLoader.py 2009-01-20 12:59:31 UTC 
(rev 600) +++ trunk/src/workers/fmfile/fmfile/FMFLoader.py 2009-01-20 15:23:29 UTC (rev 601) @@ -41,6 +41,7 @@ from pyphant.core import (Worker, Connectors, Param, DataContainer) from pyphant.quantities.PhysicalQuantities import PhysicalQuantity,isPhysicalUnit,isPhysicalQuantity +from pyphant.quantities.ParseQuantities import parseQuantity, parseVariable, str2unit import mx.DateTime.ISO import logging _logger = logging.getLogger("pyphant") @@ -240,61 +241,14 @@ config = FMFConfigObj(d.encode('utf-8').splitlines(), encoding='utf-8') return config2tables(preParsedData, config) -def str2unit(unit): - if unit.startswith('.'): - unit = '0'+unit - elif unit == '%': - unit = 0.01 - elif unit.endswith('a.u.'): - try: - unit = float(unit[:-4]) - except: - unit = 1.0 - elif not (unit[0].isdigit() or unit[0]=='-'): - unit = '1'+unit - try: - unit = unit.replace('^', '**') - unit = PhysicalQuantity(unit.encode('utf-8')) - except: - unit = float(unit) - return unit +def parseBool(value): + if value.lower() == 'true': + return True + elif value.lower() == 'false': + return False + raise AttributeError def config2tables(preParsedData, config): - def parseVariable(oldVal): - shortname, value = tuple([s.strip() for s in oldVal.split('=')]) - value, error = parseQuantity(value) - return (shortname, value, error) - - def parseQuantity(value): - pm = re.compile(ur"(?:\\pm|\+-|\+/-)") - try: - value, error = [s.strip() for s in pm.split(value)] - except: - error = None - if value.startswith('('): - value = float(value[1:]) - error, unit = [s.strip() for s in error.split(')')] - unit = str2unit(unit) - value *= unit - else: - value = str2unit(value) - if error != None: - if error.endswith('%'): - error = value*float(error[:-1])/100.0 - else: - try: - error = float(error)*unit - except: - error = str2unit(error) - return value, error - - def parseBool(value): - if value.lower() == 'true': - return True - elif value.lower() == 'false': - return False - raise AttributeError - converters = 
[ int, float, This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-01-20 15:24:27
|
Revision: 602 http://pyphant.svn.sourceforge.net/pyphant/?rev=602&view=rev Author: zklaus Date: 2009-01-20 15:24:22 +0000 (Tue, 20 Jan 2009) Log Message: ----------- Added new CoverageWorker The CoverageWorker of the ImageProcessing toolbox chooses the threshold for seperating the features from the background by taking into account the mixing ratio and the densities of the blended materials. Modified Paths: -------------- trunk/src/pyphant/pyphant/core/FieldContainer.py trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py Added Paths: ----------- trunk/src/workers/ImageProcessing/ImageProcessing/CoverageThreshold.py Modified: trunk/src/pyphant/pyphant/core/FieldContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/FieldContainer.py 2009-01-20 15:23:29 UTC (rev 601) +++ trunk/src/pyphant/pyphant/core/FieldContainer.py 2009-01-20 15:24:22 UTC (rev 602) @@ -474,7 +474,7 @@ if not self._dimensions[i] == other.dimensions[i]: return NotImplemented if isPhysicalQuantity(self.unit): - if not self.unit.isCompatible(other.unit.unit): + if not (isPhysicalQuantity(other.unit) and self.unit.isCompatible(other.unit.unit)): return NotImplemented if self.unit >= other.unit: data = self.data - (other.data*other.unit.value*other.unit.unit.conversionFactorTo(self.unit.unit))/self.unit.value Added: trunk/src/workers/ImageProcessing/ImageProcessing/CoverageThreshold.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/CoverageThreshold.py (rev 0) +++ trunk/src/workers/ImageProcessing/ImageProcessing/CoverageThreshold.py 2009-01-20 15:24:22 UTC (rev 602) @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2007, Rectorate of the University of Freiburg +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +u""" +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + +from pyphant.core import Worker, Connectors,\ + Param, DataContainer + +def weight2Coverage(w1, rho1, rho2): + return (w1*rho2)/(w1*rho2+(1.0-w1)*rho1) + +def calculateThreshold(image, coveragePercent): + import scipy + data = image.data + histogram = scipy.histogram(data, len(scipy.unique(data))) + cumsum = scipy.cumsum(histogram[0]) + targetValue = cumsum[-1]*coveragePercent + index = scipy.argmin(scipy.absolute(cumsum-targetValue)) + threshold = histogram[1][index] + return threshold*image.unit + +class CoverageWorker(Worker.Worker): + API = 2 + VERSION = 1 + REVISION = "$Revision$"[11:-1] + name = "Coverage worker" + _sockets = [("image", Connectors.TYPE_IMAGE)] + _params = [("w1", "Weight per cent of the dark material", "25%", None), + ("rho1", "Density of the dark material", "0.97g/cm**3", None), + ("rho2", "Density of the light material", "1.29g/cm**3", None), + ] + + @Worker.plug(Connectors.TYPE_IMAGE) + def threshold(self, image, subscriber=0): + from pyphant.quantities.ParseQuantities import parseQuantity + w1 = parseQuantity(self.paramW1.value)[0] + rho1 = parseQuantity(self.paramRho1.value) + rho2 = parseQuantity(self.paramRho2.value) + coveragePercent = weight2Coverage(w1, rho1, rho2) + th = calculateThreshold(image, coveragePercent) + resultArray = scipy.where( image.data < th, + ImageProcessing.FEATURE_COLOR, + ImageProcessing.BACKGROUND_COLOR ) + result = DataContainer.FieldContainer(resultArray, + dimensions=copy.deepcopy(image.dimensions), + longname=u"Binary Image", shortname=u"B") + result.seal() + return result + + Modified: trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py 2009-01-20 15:23:29 UTC (rev 601) +++ trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py 2009-01-20 15:24:22 UTC (rev 
602) @@ -42,6 +42,7 @@ workers=[ "ApplyMask", + "CoverageWorker", "DiffWorker", "DistanceMapper", "EdgeFillWorker", This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-02-18 15:19:03
|
Revision: 615 http://pyphant.svn.sourceforge.net/pyphant/?rev=615&view=rev Author: zklaus Date: 2009-02-18 15:18:57 +0000 (Wed, 18 Feb 2009) Log Message: ----------- This employs a new naming scheme for the distributions: The distribution names are now all lowercase. Pyphant remains pyphant. The toolboxes are now named pyphant.<toolbox>. Modified Paths: -------------- trunk/src/pyphant/setup.py trunk/src/workers/ImageProcessing/setup.py trunk/src/workers/OSC/setup.py trunk/src/workers/Statistics/setup.py trunk/src/workers/fmfile/setup.py Modified: trunk/src/pyphant/setup.py =================================================================== --- trunk/src/pyphant/setup.py 2009-02-12 07:48:06 UTC (rev 614) +++ trunk/src/pyphant/setup.py 2009-02-18 15:18:57 UTC (rev 615) @@ -9,7 +9,7 @@ from setuptools import setup, find_packages -setup( name='Pyphant', +setup( name='pyphant', version = VERSION, description='Workflow modelling app', author='Klaus Zimmermann, Andreas W. Liehr', @@ -18,13 +18,13 @@ maintainer_email='zk...@so...', license = "BSD", url='http://pyphant.sourceforge.net/', - install_requires=['sogl>=0.2.0' -## The following are required, but currently not setuptools enabled. -# ,'ScientificPython>=2.6', -# ,'matplotlib>=0.90.1', -# ,'scipy>=0.5.2', -# ,'tables>=1.4', -# ,'wxPython>=2.6.3.2' + install_requires=['sogl>=0.2.0', + ## The following are required, but currently not setuptools enabled. 
+ #'ScientificPython>=2.6', + #'matplotlib>=0.90.1', + #'scipy>=0.5.2', + #'tables>=1.4', + #'wxPython>=2.6.3.2', ], packages = find_packages(), entry_points={'gui_scripts':['wxPyphant = pyphant.wxgui2.wxPyphantApplication:startWxPyphant']}, Modified: trunk/src/workers/ImageProcessing/setup.py =================================================================== --- trunk/src/workers/ImageProcessing/setup.py 2009-02-12 07:48:06 UTC (rev 614) +++ trunk/src/workers/ImageProcessing/setup.py 2009-02-18 15:18:57 UTC (rev 615) @@ -19,7 +19,7 @@ import setuptools setuptools.setup( - name = "Pyphant ImageProcessing", + name = "pyphant.imageprocessing", version = VERSION, author = __author__, description = __doc__, Modified: trunk/src/workers/OSC/setup.py =================================================================== --- trunk/src/workers/OSC/setup.py 2009-02-12 07:48:06 UTC (rev 614) +++ trunk/src/workers/OSC/setup.py 2009-02-18 15:18:57 UTC (rev 615) @@ -16,7 +16,7 @@ import setuptools setuptools.setup( - name = "Pyphant OSC", + name = "pyphant.osc", version = VERSION, author = __author__, description = __doc__, Modified: trunk/src/workers/Statistics/setup.py =================================================================== --- trunk/src/workers/Statistics/setup.py 2009-02-12 07:48:06 UTC (rev 614) +++ trunk/src/workers/Statistics/setup.py 2009-02-18 15:18:57 UTC (rev 615) @@ -19,11 +19,11 @@ import setuptools setuptools.setup( - name = "Pyphant Statistics", + name = "pyphant.statistics", version = VERSION, author = __author__, description = __doc__, - install_requires=['Pyphant>=0.4alpha3'], + install_requires=['pyphant>=0.4alpha3'], packages = ['Statistics'], entry_points = """ [pyphant.workers] Modified: trunk/src/workers/fmfile/setup.py =================================================================== --- trunk/src/workers/fmfile/setup.py 2009-02-12 07:48:06 UTC (rev 614) +++ trunk/src/workers/fmfile/setup.py 2009-02-18 15:18:57 UTC (rev 615) @@ -16,7 +16,7 @@ 
import setuptools setuptools.setup( - name = "Pyphant FMF", + name = "pyphant.fmf", version = VERSION, author = __author__, description = __doc__, This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-02-18 15:44:06
|
Revision: 616 http://pyphant.svn.sourceforge.net/pyphant/?rev=616&view=rev Author: zklaus Date: 2009-02-18 15:44:01 +0000 (Wed, 18 Feb 2009) Log Message: ----------- This adapts the various requires to the new distribution naming scheme. Modified Paths: -------------- trunk/src/pyphant/pyphant/tests/TestChart.py trunk/src/pyphant/pyphant/tests/TestDataContainer.py trunk/src/pyphant/pyphant/tests/TestEventDispatcher.py trunk/src/pyphant/pyphant/tests/TestImageVisualizer.py trunk/src/pyphant/pyphant/tests/TestPyTablesPersister.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestApplyMask.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestDistanceMapper.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestImageLoader.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestMedianiser.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestSkeletonizeFeature.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestSlopeCalculator.py trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestThresholdingWorker.py trunk/src/workers/OSC/OSC/tests/TestErrorEstimator.py trunk/src/workers/OSC/OSC/tests/TestExtremumFinder.py trunk/src/workers/OSC/OSC/tests/TestMRA.py trunk/src/workers/OSC/OSC/tests/TestOscAbsorption.py trunk/src/workers/Statistics/Statistics/tests/TestHistogram.py trunk/src/workers/fmfile/fmfile/tests/TestFMFLoader.py Modified: trunk/src/pyphant/pyphant/tests/TestChart.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestChart.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/pyphant/pyphant/tests/TestChart.py 2009-02-18 15:44:01 UTC (rev 616) @@ -44,8 +44,8 @@ import pkg_resources -pkg_resources.require("Pyphant") -pkg_resources.require("Pyphant_OSC") +pkg_resources.require("pyphant") +pkg_resources.require("pyphant.osc") import numpy from pyphant.visualizers.Chart import LineChart,ScatterPlot Modified: 
trunk/src/pyphant/pyphant/tests/TestDataContainer.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestDataContainer.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/pyphant/pyphant/tests/TestDataContainer.py 2009-02-18 15:44:01 UTC (rev 616) @@ -40,7 +40,7 @@ import unittest import pkg_resources -pkg_resources.require("Pyphant") +pkg_resources.require("pyphant") import scipy import copy Modified: trunk/src/pyphant/pyphant/tests/TestEventDispatcher.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestEventDispatcher.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/pyphant/pyphant/tests/TestEventDispatcher.py 2009-02-18 15:44:01 UTC (rev 616) @@ -39,7 +39,7 @@ import unittest import pkg_resources -pkg_resources.require("Pyphant") +pkg_resources.require("pyphant") import pyphant.core.EventDispatcher as EventDispatcher Modified: trunk/src/pyphant/pyphant/tests/TestImageVisualizer.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestImageVisualizer.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/pyphant/pyphant/tests/TestImageVisualizer.py 2009-02-18 15:44:01 UTC (rev 616) @@ -46,8 +46,8 @@ import pkg_resources -pkg_resources.require("Pyphant") -pkg_resources.require("Pyphant_OSC") +pkg_resources.require("pyphant") +pkg_resources.require("pyphant.osc") import numpy from pyphant.visualizers.ImageVisualizer import ImageVisualizer Modified: trunk/src/pyphant/pyphant/tests/TestPyTablesPersister.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestPyTablesPersister.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/pyphant/pyphant/tests/TestPyTablesPersister.py 2009-02-18 15:44:01 UTC (rev 616) @@ -40,7 +40,7 @@ import unittest import pkg_resources -pkg_resources.require("Pyphant") +pkg_resources.require("pyphant") import scipy import 
copy, datetime Modified: trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestApplyMask.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestApplyMask.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestApplyMask.py 2009-02-18 15:44:01 UTC (rev 616) @@ -43,7 +43,7 @@ import pkg_resources -pkg_resources.require("Pyphant") +pkg_resources.require("pyphant") import ImageProcessing as I import ImageProcessing.ApplyMask as IM Modified: trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestDistanceMapper.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestDistanceMapper.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestDistanceMapper.py 2009-02-18 15:44:01 UTC (rev 616) @@ -46,7 +46,7 @@ import pkg_resources -pkg_resources.require("Pyphant") +pkg_resources.require("pyphant") import ImageProcessing as I import ImageProcessing.DistanceMapper as IM Modified: trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestImageLoader.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestImageLoader.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestImageLoader.py 2009-02-18 15:44:01 UTC (rev 616) @@ -44,7 +44,7 @@ import pkg_resources -pkg_resources.require("Pyphant") +pkg_resources.require("pyphant") import os.path Modified: trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestMedianiser.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestMedianiser.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestMedianiser.py 2009-02-18 15:44:01 UTC 
(rev 616) @@ -44,7 +44,7 @@ import pkg_resources -pkg_resources.require("Pyphant") +pkg_resources.require("pyphant") import ImageProcessing.Medianiser as IM import numpy Modified: trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestSkeletonizeFeature.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestSkeletonizeFeature.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestSkeletonizeFeature.py 2009-02-18 15:44:01 UTC (rev 616) @@ -43,7 +43,7 @@ import pkg_resources -pkg_resources.require("Pyphant") +pkg_resources.require("pyphant") import ImageProcessing as I import ImageProcessing.SkeletonizeFeature as IM Modified: trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestSlopeCalculator.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestSlopeCalculator.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestSlopeCalculator.py 2009-02-18 15:44:01 UTC (rev 616) @@ -41,7 +41,7 @@ import pkg_resources -pkg_resources.require("Pyphant") +pkg_resources.require("pyphant") import numpy import pylab Modified: trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestThresholdingWorker.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestThresholdingWorker.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/ImageProcessing/ImageProcessing/tests/TestThresholdingWorker.py 2009-02-18 15:44:01 UTC (rev 616) @@ -44,7 +44,7 @@ import pkg_resources -pkg_resources.require("Pyphant") +pkg_resources.require("pyphant") import ImageProcessing as I import ImageProcessing.ThresholdingWorker as IM Modified: trunk/src/workers/OSC/OSC/tests/TestErrorEstimator.py =================================================================== --- 
trunk/src/workers/OSC/OSC/tests/TestErrorEstimator.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/OSC/OSC/tests/TestErrorEstimator.py 2009-02-18 15:44:01 UTC (rev 616) @@ -42,8 +42,8 @@ import unittest import pkg_resources -pkg_resources.require("Pyphant") -pkg_resources.require("Pyphant_OSC") +pkg_resources.require("pyphant") +pkg_resources.require("pyphant.osc") import numpy import OSC.ErrorEstimator as OE Modified: trunk/src/workers/OSC/OSC/tests/TestExtremumFinder.py =================================================================== --- trunk/src/workers/OSC/OSC/tests/TestExtremumFinder.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/OSC/OSC/tests/TestExtremumFinder.py 2009-02-18 15:44:01 UTC (rev 616) @@ -44,8 +44,8 @@ import pkg_resources -pkg_resources.require("Pyphant") -pkg_resources.require("Pyphant_OSC") +pkg_resources.require("pyphant") +pkg_resources.require("pyphant.osc") import os.path Modified: trunk/src/workers/OSC/OSC/tests/TestMRA.py =================================================================== --- trunk/src/workers/OSC/OSC/tests/TestMRA.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/OSC/OSC/tests/TestMRA.py 2009-02-18 15:44:01 UTC (rev 616) @@ -44,8 +44,8 @@ import pkg_resources -pkg_resources.require("Pyphant") -pkg_resources.require("Pyphant_OSC") +pkg_resources.require("pyphant") +pkg_resources.require("pyphant.osc") import os.path Modified: trunk/src/workers/OSC/OSC/tests/TestOscAbsorption.py =================================================================== --- trunk/src/workers/OSC/OSC/tests/TestOscAbsorption.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/OSC/OSC/tests/TestOscAbsorption.py 2009-02-18 15:44:01 UTC (rev 616) @@ -45,8 +45,8 @@ import pkg_resources -pkg_resources.require("Pyphant") -pkg_resources.require("Pyphant_OSC") +pkg_resources.require("pyphant") +pkg_resources.require("pyphant.osc") import os.path Modified: 
trunk/src/workers/Statistics/Statistics/tests/TestHistogram.py =================================================================== --- trunk/src/workers/Statistics/Statistics/tests/TestHistogram.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/Statistics/Statistics/tests/TestHistogram.py 2009-02-18 15:44:01 UTC (rev 616) @@ -43,7 +43,7 @@ import pkg_resources -pkg_resources.require("Pyphant") +pkg_resources.require("pyphant") import Statistics.Histogram as S from pyphant.visualizers.ImageVisualizer import ImageVisualizer Modified: trunk/src/workers/fmfile/fmfile/tests/TestFMFLoader.py =================================================================== --- trunk/src/workers/fmfile/fmfile/tests/TestFMFLoader.py 2009-02-18 15:18:57 UTC (rev 615) +++ trunk/src/workers/fmfile/fmfile/tests/TestFMFLoader.py 2009-02-18 15:44:01 UTC (rev 616) @@ -33,7 +33,7 @@ # $Source$ import pkg_resources -pkg_resources.require('Pyphant_FMF') +pkg_resources.require('pyphant.fmf') import unittest, numpy from fmfile import FMFLoader This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-03-17 13:42:46
|
Revision: 621 http://pyphant.svn.sourceforge.net/pyphant/?rev=621&view=rev Author: zklaus Date: 2009-03-17 13:42:37 +0000 (Tue, 17 Mar 2009) Log Message: ----------- Merge branch 'SampleContainerSlicing' Conflicts: src/pyphant/pyphant/core/DataContainer.py src/pyphant/pyphant/tests/TestDataContainer.py Modified Paths: -------------- trunk/src/pyphant/pyphant/core/DataContainer.py trunk/src/pyphant/pyphant/tests/TestDataContainer.py trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py Added Paths: ----------- trunk/src/workers/ImageProcessing/ImageProcessing/FilterWorker.py Modified: trunk/src/pyphant/pyphant/core/DataContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/DataContainer.py 2009-03-17 13:41:49 UTC (rev 620) +++ trunk/src/pyphant/pyphant/core/DataContainer.py 2009-03-17 13:42:37 UTC (rev 621) @@ -265,8 +265,9 @@ def numberOfColumns(self): return len(self.columns) - #helper method for filter(expression) which parses human input to python code, ensuring no harm can be done by eval() - def _parseExpression(self, expression): + #helper method for filterDiscarded(expression) which parses human input to python code, ensuring no harm can be done by eval() + #this method is discarded + def _parseExpressionDiscarded(self, expression): import re reDoubleQuotes = re.compile(r'("[^"][^"]*")') reSplit = re.compile(r'(<(?!=)|<=|>(?!=)|>=|==|!=|and|or|not|AND|OR|NOT|\(|\))') @@ -308,15 +309,27 @@ return None #resolve multiple CompareOps like a <= b <= c == d: - ral = abstractlist[:] #future resolved abstractlist + ral = abstractlist[:] #<-- future resolved abstractlist i = 0 values = ['PhysQuant', 'SCColumn', 'Number'] + start_sequence = -1 while i < len(ral) - 4: if (ral[i][0] in values) and (ral[i+1][0] == 'CompareOp') and (ral[i+2][0] in values) and (ral[i+3][0] == 'CompareOp') and (ral[i+4][0] in values): + if start_sequence == -1: start_sequence = i ral.insert(i+3, ('Delimiter', 'and')) 
ral.insert(i+4, ral[i+2]) i += 4 - else: i += 1 + else: + if start_sequence != -1: #<-- this is necessary because 'not' has higher precedence than 'and' + ral.insert(start_sequence, ('Delimiter', '(')) + ral.insert(i+4, ('Delimiter', ')')) + start_sequence = -1 + i += 3 + else: + i += 1 + if start_sequence != -1: + ral.insert(start_sequence, ('Delimiter', '(')) + ral.insert(i+4, ('Delimiter', ')')) #parse splitted expression to fit requierements of python eval() method: parsed = '' @@ -331,13 +344,16 @@ #returns new SampleContainer containing all entries that match expression - def filter(self, expression): - parsed = self._parseExpression(expression) + #this method has been replaced by the new version of SampleContainer.filter + def filterDiscarded(self, expression): + if expression == '': + return copy.deepcopy(self) + + parsed = self._parseExpressionDiscarded(expression) if parsed == None: return None - #TODO: Nicer Iteration, reject multidim arrays or even better: handle them correctly, - # check whether all columns have same length + #TODO: check whether all columns have same length #create the selection mask mask = [] @@ -370,6 +386,335 @@ return result + #This method generates nested tuples of filter commands to be applied to a SampleContainer out of a string expression + def _getCommands(self, expression): + #TODO: compare SCColumns with each other, + # allow multi dimensional columns (think up new syntax) + + import re + #test for expressions containing whitespaces only: + if len(re.findall(r'\S', expression)) == 0: + return () + + #prepare regular expressions + reDoubleQuotes = re.compile(r'("[^"][^"]*")') + reSplit = re.compile(r'(<(?!=)|<=|>(?!=)|>=|==|!=|not|NOT|and|AND|or|OR|\(|\))') + reCompareOp = re.compile(r'<|>|==|!=') + + #split the expression + DQList = reDoubleQuotes.split(expression) + splitlist = [] + for dq in DQList: + if reDoubleQuotes.match(dq) != None: splitlist.append(dq) + else: splitlist.extend(reSplit.split(dq)) + + #identify 
splitted Elements + al = [] #<-- abstractlist containing tuples of (type, expression) + for e in splitlist: + if len(re.findall(r'\S', e)) == 0: pass #<-- skip whitespaces + elif reCompareOp.match(e) != None: + al.append(('CompareOp', e)) + elif reSplit.match(e) != None: + al.append(('Delimiter', e.lower())) + elif reDoubleQuotes.match(e) != None: + column = None + try: + column = self[e[1:-1]] + except: + raise IndexError, 'Could not find column ' + e + ' in "' + self.longname + '".' + al.append(('SCColumn', column)) + else: + try: + phq = PhysicalQuantity(e) + al.append(('PhysQuant', phq)) + continue + except: + try: + number = PhysicalQuantity(e+' m') + al.append(('Number', eval(e))) + continue + except: pass + raise ValueError, "Error parsing expression: " + e + + #resolve multiple CompareOps like a <= b <= c == d: + i = 0 + values = ['PhysQuant', 'SCColumn', 'Number'] + start_sequence = -1 + while i < len(al) - 4: + if (al[i][0] in values) and (al[i+1][0] == 'CompareOp') and (al[i+2][0] in values) and (al[i+3][0] == 'CompareOp') and (al[i+4][0] in values): + if start_sequence == -1: + start_sequence = i + al.insert(i+3, ('Delimiter', 'and')) + al.insert(i+4, al[i+2]) + i += 4 + else: + if start_sequence != -1: #<-- this is necessary because 'not' has higher precedence than 'and' + al.insert(start_sequence, ('Delimiter', '(')) + al.insert(i+4, ('Delimiter', ')')) + start_sequence = -1 + i += 3 + else: + i += 1 + if start_sequence != -1: + al.insert(start_sequence, ('Delimiter', '(')) + al.insert(i+4, ('Delimiter', ')')) + + #identify atomar components like "a" <= 10m, 10s > "b", ... and compress them: + if al[0][0] == 'CompareOp': + raise ValueError, al[0][1] + " may not stand at the very beginning of an expression!" + i = 1 + valid = False + while i < len(al): + if al[i][0] == 'CompareOp': + left = al.pop(i-1) + middle = al.pop(i-1) + right = al.pop(i-1) + if left[0] not in values: + raise TypeError, str(left[1]) + " is not a proper value." 
+ if right[0] not in values: + raise TypeError, str(right[1]) + " is not a proper value." + al.insert(i-1, ('Atomar', left, middle[1], right)) + valid = True + i += 1 + if not valid: raise ValueError, "There has to be at least one valid comparison: " + expression + + #identify braces and compress them recursively: + def compressBraces(sublist): + openbraces = 0 + start = 0 + end = 0 + finished = False + for i in range(len(sublist)): + if sublist[i] == ('Delimiter', '('): + openbraces += 1 + if openbraces == 1 and not finished: + start = i + elif sublist[i] == ('Delimiter', ')'): + openbraces -= 1 + if openbraces == 0 and not finished: + end = i + finished = True + if openbraces != 0: + raise ValueError, "There are unmatched braces within the expression: " + expression + if start==0 and end==0: + #no more braces found: end of recursion + return sublist[:] + else: + if end-start == 1: + raise ValueError, "There are braces enclosing nothing in the expression: " + expression + middle = None + if end-start == 2: + #discard unnecessary braces in order to reduce recursion depth later on: + middle = sublist[start+1:start+2] + else: + middle = [('Brace', compressBraces(sublist[start+1:end]))] + return sublist[0:start] + middle + compressBraces(sublist[end+1:]) + + #TODO: The following three methods could be merged into one generalized method for compressing unitary and binary operators. This would be useful in a future version when there are lots of operators to be supported. 
+ + #identify "not"s and compress them recursively: + def compressNot(sublist): + i = 0 + while i < len(sublist): + if sublist[i] == ('Delimiter', 'not'): + if i+1 >= len(sublist): + raise ValueError, "'not' must not stand at the very end of an expression: " + expression + x = sublist[i+1] + if x[0] == 'Atomar': + return sublist[0:i] + [('NOT', x)] + compressNot(sublist[i+2:]) + elif x[0] == 'Brace': + return sublist[0:i] + [('NOT', ('Brace', compressNot(x[1])))] + compressNot(sublist[i+2:]) + else: + raise ValueError, "'not' cannot be applied to " + str(x) + "." + elif sublist[i][0] == 'Brace': + return sublist[0:i] + [('Brace', compressNot(sublist[i][1]))] + compressNot(sublist[i+1:]) + i += 1 + return sublist[:] + + #identify "and"s and compress them recursively: + def compressAnd(sublist, start=0): + i = start #<-- start=1 indicates that the 1st element of sublist has already been compressed. This is necessary for binary operators. + while i < len(sublist): + if sublist[i] == ('Delimiter', 'and'): + left = None + if start == 1 and i == 1: left = sublist[i-1] + else: left = compressAnd(sublist[i-1:i])[0] + if left[0] not in ['NOT', 'AND', 'Brace', 'Atomar']: + raise ValueError, "'and' cannot be applied to " + str(left) + "." + right = compressAnd(sublist[i+1:i+2])[0] + if right[0] not in ['NOT', 'AND', 'Brace', 'Atomar']: + raise ValueError, "'and' cannot be applied to " + str(right) + "." 
+ return sublist[0:i-1] + compressAnd([('AND', left, right)] + sublist[i+2:], 1) + elif sublist[i][0] == 'Brace': + return sublist[0:i] + compressAnd([('Brace', compressAnd(sublist[i][1]))] + sublist[i+1:], 1) + elif sublist[i][0] == 'NOT': + return sublist[0:i] + compressAnd([('NOT', compressAnd([sublist[i][1]])[0])] + sublist[i+1:], 1) + i += 1 + return sublist[:] + + #identify "or"s and compress them recursively, decompress braces in order to reduce recursion depth later on: + def compressOrDCB(sublist, start=0): + i = start #<-- start=1 indicates that the 1st element of sublist has already been compressed. This is necessary for binary operators. + while i < len(sublist): + if sublist[i] == ('Delimiter', 'or'): + left = None + if start == 1 and i == 1: left = sublist[i-1] + else: left = compressOrDCB(sublist[i-1:i])[0] + if left[0] not in ['NOT', 'AND', 'Atomar', 'OR']: + raise ValueError, "'or' cannot be applied to " + str(left) + "." + right = compressOrDCB(sublist[i+1:i+2])[0] + if right[0] not in ['NOT', 'AND', 'Atomar', 'OR']: + raise ValueError, "'or' cannot be applied to " + str(right) + "." + return sublist[0:i-1] + compressOrDCB([('OR', left, right)] + sublist[i+2:], 1) + elif sublist[i][0] == 'Brace': + inner = compressOrDCB(sublist[i][1]) + if len(inner) != 1: + raise ValueError, "Expression could not be parsed completely. 
(probably missing keyword): " + expression + return sublist[0:i] + compressOrDCB(inner + sublist[i+1:], 1) + elif sublist[i][0] == 'NOT': + return sublist[0:i] + compressOrDCB([('NOT', compressOrDCB([sublist[i][1]])[0])] + sublist[i+1:], 1) + elif sublist[i][0] == 'AND': + return sublist[0:i] + compressOrDCB([('AND', compressOrDCB([sublist[i][1]])[0], compressOrDCB([sublist[i][2]])[0])] + sublist[i+1:], 1) + i += 1 + return sublist[:] + + compressed = compressOrDCB(compressAnd(compressNot(compressBraces(al)))) + if len(compressed) != 1: + raise ValueError, "Expression could not be parsed completely (probably missing keyword): " + expression + return compressed[0] + + + #returns a new SampleContainer with filter commands applied to it. Expression can be either a string expression or nested tuples of commands + def filter(self, expression): + #determine type of expression: + from types import StringType, UnicodeType, TupleType + commands = None + if isinstance(expression, UnicodeType): + commands = self._getCommands(expression.encode('utf-8')) + elif isinstance(expression, StringType): + commands = self._getCommands(expression) + elif isinstance(expression, TupleType) or expression==None: + commands = expression + else: + raise TypeError, "Expression has to be of StringType, UnicodeType, TupleType or None. Found " + str(type(expression)) + " instead!" 
+ + #check for empty commands: + if commands == None or commands==(): + return copy.deepcopy(self) + + #generate boolean numpymask from commands using fast numpy methods: + def evaluateAtomar(atomar): + left = atomar[1] + if left[0] == 'SCColumn': + if left[1].data.ndim != 1: + raise NotImplementedError, 'Comparing columns of dimensions other than one is not yet implemented: "' + left[1].longname + '"' + + right = atomar[3] + if right[0] == 'SCColumn': + if right[1].data.ndim != 1: + raise NotImplementedError, 'Comparing columns of dimensions other than one is not yet implemented: "' + right[1].longname + '"' + + leftvalue = None + rightvalue = None + if left[0] == 'SCColumn' and right[0] == 'SCColumn': + number = right[1].unit/left[1].unit + if isPhysicalQuantity(number): + raise TypeError, 'Cannot compare "' + left[1].longname + '" to "' + right[1].longname + '".' + leftvalue = left[1].data + rightvalue = right[1].data*number + elif left[0] == 'SCColumn': + number = right[1]/left[1].unit + if isPhysicalQuantity(number): + raise TypeError, 'Cannot compare "' + left[1].longname + '" to ' + str(right[1]) + '".' + leftvalue = left[1].data + rightvalue = number + elif right[0] == 'SCColumn': + number = left[1]/right[1].unit + if isPhysicalQuantity(number): + raise TypeError, "Cannot compare " + str(left[1]) + ' to "' + right[1].longname + '".' + leftvalue = number + rightvalue = right[1].data + else: + raise ValueError, "At least one argument of '" + atomar[2][1] + "' has to be a column." 
+ + if atomar[2] == '==': return leftvalue == rightvalue + elif atomar[2] == '!=': return leftvalue != rightvalue + elif atomar[2] == '<=': return leftvalue <= rightvalue + elif atomar[2] == '<' : return leftvalue < rightvalue + elif atomar[2] == '>=': return leftvalue >= rightvalue + elif atomar[2] == '>' : return leftvalue > rightvalue + raise ValueError, "Invalid atomar expression: " + str(atomar) + + def getMaskFromCommands(cmds): + if cmds[0] == 'Atomar': + return evaluateAtomar(cmds) + elif cmds[0] == 'AND': + left = getMaskFromCommands(cmds[1]) + right = getMaskFromCommands(cmds[2]) + if left.shape != right.shape: + raise TypeError, "Cannot apply 'and' to columns of different shape: " + str(left.shape) + ", " + str(right.shape) + return numpy.logical_and(left, right) + elif cmds[0] == 'OR': + left = getMaskFromCommands(cmds[1]) + right = getMaskFromCommands(cmds[2]) + if left.shape != right.shape: + raise TypeError, "Cannot apply 'or' to columns of different shape: " + str(left.shape) + ", " + str(right.shape) + return numpy.logical_or(left, right) + elif cmds[0] == 'NOT': + return numpy.logical_not(getMaskFromCommands(cmds[1])) + + numpymask = getMaskFromCommands(commands) + + #the following code is a bit longish whereas time consuming copy.deepcopy operations with subsequent masking are avoided wherever possible + + #generate new columns with the boolean mask applied to them using fast numpy slicing + maskedcolumns = [] + for c in self.columns: + #mask dimension + mdims = [] + try: + for d in c.dimensions: + md = copy.deepcopy(d) #<-- could be avoided too, but creating a new FieldContainer always ends up with standard dimensions whereas dimensions are not supposed to bear dimensions themselves + if mdims == []: #<-- only primary axis has to be masked + #mask errors of dimensions if there are any + if d.error != None: + md.error = d.error[numpymask] + + #mask data of dimensions + if d.data != None: + md.data = d.data[numpymask] + + mdims.append(md) + + 
#mask errors: + cerr = None + if c.error != None: cerr = c.error[numpymask] + + #mask data: + cdata = None + if c.data != None: cdata = c.data[numpymask] + + maskedcolumns.append(FieldContainer(cdata, + copy.deepcopy(c.unit), + cerr, + copy.deepcopy(c.mask), + mdims, + longname=c.longname, + shortname=c.shortname, + attributes=copy.deepcopy(c.attributes), + rescale=False)) + except ValueError: + raise ValueError, 'Column "' + c.longname + '" has not enough rows!' + + #build new SampleContainer from masked columns and return it + result = SampleContainer(maskedcolumns, + longname=self.longname, + shortname=self.shortname, + attributes=copy.deepcopy(self.attributes)) + return result + + + def assertEqual(con1,con2,rtol=1e-5,atol=1e-8): diagnosis=StringIO.StringIO() testReport = logging.StreamHandler(diagnosis) Modified: trunk/src/pyphant/pyphant/tests/TestDataContainer.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestDataContainer.py 2009-03-17 13:41:49 UTC (rev 620) +++ trunk/src/pyphant/pyphant/tests/TestDataContainer.py 2009-03-17 13:42:37 UTC (rev 621) @@ -393,15 +393,56 @@ class SampleContainerSlicingTests(SampleContainerTest): - #TODO: Write more tests with more complicated expressions + def setUp(self): + super(SampleContainerSlicingTests, self).setUp() + time_data = numpy.array([10.0, 20.0, 30.0, 5.0, 9000.0]) + time_error = numpy.array([1.0, 2.0, 3.0, .5, 900.0]) + time_unit = PhysicalQuantity('2s') + time_FC = FieldContainer(time_data, time_unit, time_error, None, None, "Zeit", "t", None, False) + + length_data = numpy.array([-20.0, 0.0, 20.0, 10.0, 5.5]) + length_error = numpy.array([2.0, 0.1, 2.0, 1.0, .5]) + length_unit = PhysicalQuantity('1000m') + length_FC = FieldContainer(length_data, length_unit, length_error, None, None, "Strecke", "l", None, False) + + + temperature_data = numpy.array([[10.1, 10.2, 10.3], + [20.1, 20.2, 20.3], + [30.1, 30.2, 30.3], + [40.1, 40.2, 40.3], + [50.1, 50.2, 
50.3]]) + temperature_error = numpy.array([[0.1, 0.2, 0.3], + [1.1, 1.2, 1.3], + [2.1, 2.2, 2.3], + [3.1, 3.2, 3.3], + [4.1, 4.2, 4.3]]) + temperature_unit = PhysicalQuantity('1mK') + + temperature_FC = FieldContainer(temperature_data, temperature_unit, temperature_error, None, None, "Temperatur", "T", None, False) + + self.sc2d = SampleContainer([length_FC, temperature_FC, time_FC], "Test Container", "TestC") + + self.sc2d["t"].dimensions[0].unit = PhysicalQuantity('5m') + self.sc2d["t"].dimensions[0].data = numpy.array([-20, -10, 0, 10, 20]) + + self.sc2d["l"].dimensions[0].unit = PhysicalQuantity('2mm') + self.sc2d["l"].dimensions[0].data = numpy.array([-1, -0.5, 0, 0.5, 1]) + + self.sc2d["T"].dimensions[0].unit = PhysicalQuantity('0.5mm') + self.sc2d["T"].dimensions[0].data = numpy.array([-3, -1.5, 0, 1.5, 3]) + self.sc2d["T"].dimensions[1].unit = PhysicalQuantity('10nm') + self.sc2d["T"].dimensions[1].data = numpy.array([-1, 0, 1]) + + + #purely one dimensional Tests: def testConsistancy(self): result1 = self.sampleContainer.filter('20m < "i" and 80m > "i"') result2 = self.sampleContainer.filter('20m < "i" < 80m') self.assertEqual(result1[0], result2[0]) self.assertEqual(result1[1], result2[1]) - def testSimpleExpression(self): - result = self.sampleContainer.filter('50m <= "i" < 57m') + def testSimpleUnicodeExpression(self): + result = self.sampleContainer.filter(u'50m <= "i" < 57m') self.assertEqual(len(result.columns), 2) self.assertEqual(len(result[0].data), 7) self.assertEqual(len(result[1].data), 7) @@ -425,7 +466,54 @@ self.assertEqual(result[0], expectedi) self.assertEqual(result[1], expectedt) + #tests involving 2 dimensional FieldContainers: + def _compareExpected(self, expression, ind): + indices = numpy.array(ind) + result = self.sc2d.filter(expression) + expectedSC = copy.deepcopy(self.sc2d) + for FC in expectedSC: + FC.data = FC.data[indices] + FC.error = FC.error[indices] + FC.dimensions[0].data = FC.dimensions[0].data[indices] + 
self.assertEqual(result, expectedSC) + def testEmpty2dExpression(self): + result = self.sc2d.filter('') + self.assertEqual(result, self.sc2d) + result = self.sc2d.filter(()) + self.assertEqual(result, self.sc2d) + + def testAtomar2dExpressions(self): + self._compareExpected('"t" <= 40.0s', [True, True, False, True, False]) + self._compareExpected('"l" < 10000m', [True, True, False, False, True]) + self._compareExpected('"Zeit" >= 20.0s', [True, True, True, False, True]) + self._compareExpected('"l" > 5500m', [False, False, True, True, False]) + self._compareExpected('"t" == 18000s', [False, False, False, False, True]) + self._compareExpected('"Strecke" != 20000m', [True, True, False, True, True]) + + def testNot2dExpression(self): + self._compareExpected('not "t" == 10s', [True, True, True, False, True]) + + def testAnd2dExpression(self): + self._compareExpected('"Zeit" == 60s and 20000m == "Strecke"', [False, False, True, False, False]) + + def testOr2dExpression(self): + self._compareExpected('"Zeit" < 60s or "Strecke" == 5500m', [True, True, False, True, True]) + + def testPrecedence2dExpression(self): + self._compareExpected('0m > "l" or not ("t" == 20s or "t" == 40s) and (("l" == -20000m or "t" == 40s) or "l" == 5500m)', [True, False, False, False, True]) + + def testNestedTuple2dExpression(self): + self._compareExpected(('AND', ('Atomar', ('SCColumn', self.sc2d["t"]), '==',('PhysQuant', PhysicalQuantity('20s'))), ('Atomar', ('SCColumn', self.sc2d["l"]), '==', ('PhysQuant', PhysicalQuantity('-20000m')))), [True, False, False, False, False]) + + def testMultipleCompareOpPrecedence2dExpression(self): + self._compareExpected('not 0m <= "l" <= 10000m', [True, False, True, False, False]) + + def testColumnToColumn2dExpression(self): + self._compareExpected('"l" == "Strecke"', [True, True, True, True, True]) + self._compareExpected('"t" != "Zeit"', [False, False, False, False, False]) + + class FieldContainerRescaling(unittest.TestCase): def setUp(self): 
self.testData = scipy.array([[0,1,2],[3,4,5],[6,7,8]]) Copied: trunk/src/workers/ImageProcessing/ImageProcessing/FilterWorker.py (from rev 620, trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py) =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/FilterWorker.py (rev 0) +++ trunk/src/workers/ImageProcessing/ImageProcessing/FilterWorker.py 2009-03-17 13:42:37 UTC (rev 621) @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2009, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +u""" + +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + +from pyphant.core import Worker, Connectors,\ + Param, DataContainer + +class FilterWorker(Worker.Worker): + API = 2 + VERSION = 1 + REVISION = "$Revision$"[11:-1] + name = "SC Filter" + _sockets = [("table", Connectors.TYPE_ARRAY)] + _params = [("expression", "Filter Expression", '', None)] + + @Worker.plug(Connectors.TYPE_ARRAY) + def applyfilter(self, table, subscriber=0): + result = table.filter(self.paramExpression.value) + result.seal() + return result + Modified: trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py 2009-03-17 13:41:49 UTC (rev 620) +++ trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py 2009-03-17 13:42:37 UTC (rev 621) @@ -47,6 +47,7 @@ "DistanceMapper", "EdgeFillWorker", "EdgeTouchingFeatureRemover", + "FilterWorker", "ImageLoaderWorker", "InvertWorker", "Medianiser", This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-03-17 14:26:47
|
Revision: 622 http://pyphant.svn.sourceforge.net/pyphant/?rev=622&view=rev Author: zklaus Date: 2009-03-17 14:26:25 +0000 (Tue, 17 Mar 2009) Log Message: ----------- These are the documentation patches. Modified Paths: -------------- trunk/src/pyphant/pyphant/core/DataContainer.py trunk/src/pyphant/pyphant/core/FieldContainer.py trunk/src/workers/ImageProcessing/ImageProcessing/ApplyMask.py trunk/src/workers/ImageProcessing/ImageProcessing/CoverageWorker.py trunk/src/workers/ImageProcessing/ImageProcessing/DiffWorker.py trunk/src/workers/ImageProcessing/ImageProcessing/DistanceMapper.py trunk/src/workers/ImageProcessing/ImageProcessing/EdgeFillWorker.py trunk/src/workers/ImageProcessing/ImageProcessing/EdgeTouchingFeatureRemover.py trunk/src/workers/ImageProcessing/ImageProcessing/ImageLoaderWorker.py trunk/src/workers/ImageProcessing/ImageProcessing/InvertWorker.py trunk/src/workers/ImageProcessing/ImageProcessing/Medianiser.py trunk/src/workers/ImageProcessing/ImageProcessing/ThresholdingWorker.py trunk/src/workers/ImageProcessing/ImageProcessing/UltimatePointsCalculator.py trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py trunk/src/workers/OSC/OSC/AddColumn.py trunk/src/workers/OSC/OSC/CompareFields.py trunk/src/workers/OSC/OSC/ComputeFunctional.py trunk/src/workers/OSC/OSC/ErrorEstimator.py trunk/src/workers/OSC/OSC/ExtremumFinder.py trunk/src/workers/OSC/OSC/Slicing.py trunk/src/workers/OSC/OSC/Smoother.py trunk/src/workers/OSC/OSC/__init__.py trunk/src/workers/Statistics/Statistics/Histogram.py trunk/src/workers/fmfile/fmfile/FMFLoader.py trunk/src/workers/fmfile/fmfile/__init__.py Modified: trunk/src/pyphant/pyphant/core/DataContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/DataContainer.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/pyphant/pyphant/core/DataContainer.py 2009-03-17 14:26:25 UTC (rev 622) @@ -31,25 +31,37 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH 
DAMAGE. u""" -DataContainer \t- A Pyphant modul for self-explanatory scientific data -====================================================================== -\nA Pyphant DataContainer presents the following attributes: -\t .longname \t- Notation of the data, e.g. 'electric field', -\t\t\t which is used for the automatic annotation of charts. -\t .shortname \t- Symbol of the physical variable in LaTeX notation, e.g. 'E_\\alpha', -\t\t\t which is also used for the automatic annotation of charts. -\t .id \t\t- Identifier of Enhanced MD5 (emd5) format -\t\t\t\temd5://NODE/USER/DATETIME/MD5-HASH.TYPESTRING -\t\t\t which is set by calling method .seal() and -\t\t\t indicates that the stored information are unchangable. -\t .label\t- Typical axis description composed from the meta information of the DataContainer. -\t .data \t- Data object, e.g. numpy.array +============================================================================= +**DataContainer** -- A Pyphant module for *self-explanatory scientific data* +============================================================================= -DataContainer \t\t- Base class for self-explanatory scientific data -FieldContainer \t\t- Class describing sampled fields -SampleContainer \t- Class used for storing realizations of random variables -generateIndex() \t- Function returning an indexing FieldContainer instance -parseId()\t\t- Function returning tupple (HASH,TYPESTRING) from given .id attribute. +The *DataContainer* is Pypahnt's preferred data exchange class. It is +designed to maximise the interoperability of the various workers +provided by Pyphant. + +It can be seen as an interface for exchanging data between workers and +visualizers and among workers. It reproduces the self-descriptiveness of the *network +Common Data Form* (netCDF). Once sealed it is immutable. It can be +identified by its *emd5* attribute, a unique identifier composed of +information about the origin of the container. 
+ +There are two kinds of DataContainers: + + - L{FieldContainer} + - is designed to store *sampled scalar Fields* + + - L{SampleContainer} + - is designed to store *tabular data* + + +**SampleContainer** -- A pyphant module storing tabular data +============================================================= + +The *SampleContainer* combines different FieldContainers that have the +same number of sample points to a table-like representation. It stores +different observations on the same subject per row whereby each column +comprises a quantity of the same kind. Each row can be regarded as the +realization of a random variable. """ __id__ = "$Id$" Modified: trunk/src/pyphant/pyphant/core/FieldContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/FieldContainer.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/pyphant/pyphant/core/FieldContainer.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,29 @@ # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +u""" +================================================================= +The **FieldContainer** -- A module storing sampled scalar fields +================================================================= + +The *FieldContainer* represents a class of Pyphant's L{DataContainer} +module. + + That is Pyphant's module for *self-descriptive scientific data* + which is designed to maximise the interoperability of the various + workers. It can be seen as an interface for exchanging scientific + information between workers and visualizers. It reproduces the + self-descriptiveness of the *network Common Data Form* (netCDF). Once + sealed a DataContainer is immutable but can be identified by + its *emd5* format which holds information about the origin of + the container. + +The *FieldContainer* stores a sampled scalar Field. 
+ + It holds an *n-dimensional array* together with + its *unit* and coordinates of the independent variable (*dimensions*). +""" + +__id__ = "$Id$" __author__ = "$Author$" __version__ = "$Revision$" Modified: trunk/src/workers/ImageProcessing/ImageProcessing/ApplyMask.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/ApplyMask.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/ImageProcessing/ImageProcessing/ApplyMask.py 2009-03-17 14:26:25 UTC (rev 622) @@ -29,7 +29,10 @@ # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -u"""Class ApplyMask of Pyphant's Imageprocessing toolbox. """ +u""" +The ApplyMask Worker is a class of Pyphant's Image Processing +Toolbox. By using this worker one grey-scale image can be applied as a +mask on another image. """ __id__ = "$Id$" Modified: trunk/src/workers/ImageProcessing/ImageProcessing/CoverageWorker.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/CoverageWorker.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/ImageProcessing/ImageProcessing/CoverageWorker.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,10 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Coverage Worker is a class of Pyphant's Image Processing +toolbox. It compares every pixel with a calculated +threshold. Therefore required percentages of black and white material +in the image can be edited. 
""" __id__ = "$Id$" Modified: trunk/src/workers/ImageProcessing/ImageProcessing/DiffWorker.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/DiffWorker.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/ImageProcessing/ImageProcessing/DiffWorker.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,9 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Diff Worker belongs to Pyphant's Image Processing Toolbox. It +computes the difference between two images, eg. the skeletonised image +and the origial image. """ __id__ = "$Id$" Modified: trunk/src/workers/ImageProcessing/ImageProcessing/DistanceMapper.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/DistanceMapper.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/ImageProcessing/ImageProcessing/DistanceMapper.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,8 +30,11 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Distance Mapper Worker is a class of Pyphant's Image Processing +Toolbox. It is used to determine the size of respective features in +binary images through calculating the distance of every pixel to the +nearest background pixel. """ - __id__ = "$Id: /local/pyphant/sourceforge/trunk/src/workers/ImageProcessing/ImageProcessing/DistanceMapper.py 3671 2007-12-19T14:18:11.779018Z obi $" __author__ = "$Author: obi $" __version__ = "$Revision: 3671 $" Modified: trunk/src/workers/ImageProcessing/ImageProcessing/EdgeFillWorker.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/EdgeFillWorker.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/ImageProcessing/ImageProcessing/EdgeFillWorker.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,8 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
u""" +The Edge Fill Worker is a class of Pyphant's Image Processing +Toolbox. It is used to backfil outlined features again. """ __id__ = "$Id$" Modified: trunk/src/workers/ImageProcessing/ImageProcessing/EdgeTouchingFeatureRemover.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/EdgeTouchingFeatureRemover.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/ImageProcessing/ImageProcessing/EdgeTouchingFeatureRemover.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,9 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Touching Feature Remover Worker is a class of Pyphant's Image Processing +Toolbox. It removes all features of an image that are touching the +edge until only the outline remains. """ __id__ = "$Id$" Modified: trunk/src/workers/ImageProcessing/ImageProcessing/ImageLoaderWorker.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/ImageLoaderWorker.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/ImageProcessing/ImageProcessing/ImageLoaderWorker.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,9 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The ImageLoader Worker is a class of Pyphant's Image Processing +Toolbox. It simply loads an image from the location given in the +worker's configuration. """ __id__ = "$Id$" Modified: trunk/src/workers/ImageProcessing/ImageProcessing/InvertWorker.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/InvertWorker.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/ImageProcessing/ImageProcessing/InvertWorker.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,8 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Inverter Worker is a class of Pyphant's Image Processing +Toolbox. It simply inverts an image. 
""" __id__ = "$Id$" Modified: trunk/src/workers/ImageProcessing/ImageProcessing/Medianiser.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/Medianiser.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/ImageProcessing/ImageProcessing/Medianiser.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,10 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Madianiser Worker is a class of Pyphant's Image Processing +Toolbox. It is used to remove noise from an image, by implementing a +standard median filter. In its configurations the size of the applied +kernel and the number of smoothing runs can be edited. """ __id__ = "$Id$" Modified: trunk/src/workers/ImageProcessing/ImageProcessing/ThresholdingWorker.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/ThresholdingWorker.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/ImageProcessing/ImageProcessing/ThresholdingWorker.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,10 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Thresholding Worker is a class of Pyphant's image Processing +Toolbox. The threshold can be edited in the worker's configuration. It +returns a binary image where pixels that comprise features are set to +0x00 whereas background pixels are set to 0xFF. """ __id__ = "$Id$" Modified: trunk/src/workers/ImageProcessing/ImageProcessing/UltimatePointsCalculator.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/UltimatePointsCalculator.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/ImageProcessing/ImageProcessing/UltimatePointsCalculator.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,8 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
u""" +The Ultimate Points Calculator Worker is a class of Pyphant's Image +Processing Toolbox. It is used to calculate peaks in an image. """ __id__ = "$Id$" Modified: trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,7 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The ImageProcessing toolbox holds workers to process data resulting from scalar fields. """ __id__ = "$Id$" Modified: trunk/src/workers/OSC/OSC/AddColumn.py =================================================================== --- trunk/src/workers/OSC/OSC/AddColumn.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/OSC/OSC/AddColumn.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,9 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Add Column Worker is a class of Pyphant's OSC Toolbox. It inserts a +column that was once extracted by using the column extractor in the +orignial chart again. """ __id__ = "$Id$" Modified: trunk/src/workers/OSC/OSC/CompareFields.py =================================================================== --- trunk/src/workers/OSC/OSC/CompareFields.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/OSC/OSC/CompareFields.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,7 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Compare Fields Worker is a class of Pyphant's OSC Toolbox. 
""" __id__ = "$Id$" Modified: trunk/src/workers/OSC/OSC/ComputeFunctional.py =================================================================== --- trunk/src/workers/OSC/OSC/ComputeFunctional.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/OSC/OSC/ComputeFunctional.py 2009-03-17 14:26:25 UTC (rev 622) @@ -29,7 +29,8 @@ # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -u"""Pyphant module providing worker for finding the local extrema of 1D functions. +u""" +Pyphant module providing worker for finding the local extrema of 1D functions. """ __id__ = "$Id$" Modified: trunk/src/workers/OSC/OSC/ErrorEstimator.py =================================================================== --- trunk/src/workers/OSC/OSC/ErrorEstimator.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/OSC/OSC/ErrorEstimator.py 2009-03-17 14:26:25 UTC (rev 622) @@ -29,6 +29,8 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Error Estimaotr Worker is a class of Pyphant's OSC-Toolbox. It +valuates the error caused by noise for every pixel in a field. """ __id__ = "$Id$" Modified: trunk/src/workers/OSC/OSC/ExtremumFinder.py =================================================================== --- trunk/src/workers/OSC/OSC/ExtremumFinder.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/OSC/OSC/ExtremumFinder.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,7 +30,11 @@ # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -u"""Pyphant module computing the local extrema of one-dimensional sampled fields. If a two-dimensional field is provided as input, the algorithm loops over the 0th dimension denoting the y-axis, which corresponds to an iteration over the rows of the data matrix. +u""" +Pyphant module computing the local extrema of one-dimensional +sampled fields. 
If a two-dimensional field is provided as input, the +algorithm loops over the 0th dimension denoting the y-axis, which +corresponds to an iteration over the rows of the data matrix. """ __id__ = "$Id$" Modified: trunk/src/workers/OSC/OSC/Slicing.py =================================================================== --- trunk/src/workers/OSC/OSC/Slicing.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/OSC/OSC/Slicing.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,8 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Slicing Worker is a class of Pyphant's OSC Toolbox. It cuts out one +part of a field and provides it as a new field to work on. """ __id__ = "$Id$" Modified: trunk/src/workers/OSC/OSC/Smoother.py =================================================================== --- trunk/src/workers/OSC/OSC/Smoother.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/OSC/OSC/Smoother.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,8 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Smoother Worker is a class of Pyphant's OSC Toolbox. It is used to +reduce noise from a field. The number of smoothing runs can be edited. """ __id__ = "$Id$" Modified: trunk/src/workers/OSC/OSC/__init__.py =================================================================== --- trunk/src/workers/OSC/OSC/__init__.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/OSC/OSC/__init__.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,8 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The OSC Toolbox holds workers for processing data coming from organic +solar cells. 
""" __id__ = "$Id$" Modified: trunk/src/workers/Statistics/Statistics/Histogram.py =================================================================== --- trunk/src/workers/Statistics/Statistics/Histogram.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/Statistics/Statistics/Histogram.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,10 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The Historgam Worker is a class of Pyphant's Statistic Toolbox. It +calcuates a histogram from the provided data. Histograms can be +visualisd as bar charts or line charts. The rexpective axes are +automatically correctly labled. """ __id__ = "$Id$" Modified: trunk/src/workers/fmfile/fmfile/FMFLoader.py =================================================================== --- trunk/src/workers/fmfile/fmfile/FMFLoader.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/fmfile/fmfile/FMFLoader.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,8 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The FMF Loader is a class of Pyphant's FMF Toolbox. It loads an FMF +file from the location given in the worker's configuration. """ __id__ = "$Id$" Modified: trunk/src/workers/fmfile/fmfile/__init__.py =================================================================== --- trunk/src/workers/fmfile/fmfile/__init__.py 2009-03-17 13:42:37 UTC (rev 621) +++ trunk/src/workers/fmfile/fmfile/__init__.py 2009-03-17 14:26:25 UTC (rev 622) @@ -30,6 +30,7 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" +The FMF Toolbox holds workers for processing full meta data format files. """ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-04-02 14:01:19
|
Revision: 628 http://pyphant.svn.sourceforge.net/pyphant/?rev=628&view=rev Author: zklaus Date: 2009-04-02 13:39:35 +0000 (Thu, 02 Apr 2009) Log Message: ----------- Datetime stamps are interpreted as days counted from the Gregorian date 0001-01-01. - Added support for datetime errors. - Parsing of datetime stamps is moved to ParseQuantities module. - Correct handling of FMF-file quantities collected from ZIP archives - Taking account of suqsequent missing error specification - Taking account of suqsequent missing values (NaN) Modified Paths: -------------- trunk/src/pyphant/pyphant/quantities/ParseQuantities.py trunk/src/pyphant/setup.py trunk/src/workers/fmfile/fmfile/FMFLoader.py trunk/src/workers/fmfile/fmfile/tests/TestFMFLoader.py trunk/src/workers/fmfile/setup.py Added Paths: ----------- trunk/src/pyphant/pyphant/tests/TestParseQuantities.py Modified: trunk/src/pyphant/pyphant/quantities/ParseQuantities.py =================================================================== --- trunk/src/pyphant/pyphant/quantities/ParseQuantities.py 2009-03-20 12:14:26 UTC (rev 627) +++ trunk/src/pyphant/pyphant/quantities/ParseQuantities.py 2009-04-02 13:39:35 UTC (rev 628) @@ -38,6 +38,7 @@ # $Source$ from PhysicalQuantities import PhysicalQuantity +import mx.DateTime.ISO def str2unit(unit): if unit.startswith('.'): @@ -89,3 +90,20 @@ shortname, value = tuple([s.strip() for s in oldVal.split('=')]) value, error = parseQuantity(value) return (shortname, value, error) + +def parseDateTime(value): + """ + >>>parseDateTime('2004-08-21 12:00:00+-12h') + (PhysicalQuantity(731814.5,'d'), PhysicalQuantity(0.5,'d')) + >>>parseDateTime('2004-08-21 12:00:00') + (PhysicalQuantity(731814.5,'d'), None) + """ + datetimeWithError = value.split('+-') + if len(datetimeWithError)==2: + datetime = mx.DateTime.ISO.ParseAny(datetimeWithError[0]) + error = parseQuantity(datetimeWithError[1])[0].inUnitsOf('d') + else: + datetime = mx.DateTime.ISO.ParseAny(value) + error = None + days,seconds = 
datetime.absvalues() + return (PhysicalQuantity(days,'d')+PhysicalQuantity(seconds,'s'),error) Copied: trunk/src/pyphant/pyphant/tests/TestParseQuantities.py (from rev 627, trunk/src/workers/fmfile/fmfile/tests/TestFMFLoader.py) =================================================================== --- trunk/src/pyphant/pyphant/tests/TestParseQuantities.py (rev 0) +++ trunk/src/pyphant/pyphant/tests/TestParseQuantities.py 2009-04-02 13:39:35 UTC (rev 628) @@ -0,0 +1,59 @@ +#!/usr/bin/env python2.5 +# -*- coding: utf-8 -*- + +# Copyright (c) 2009, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# $Source$ + +import pkg_resources +pkg_resources.require('pyphant') + +import unittest, numpy +from pyphant.quantities.ParseQuantities import parseDateTime +from pyphant.quantities.PhysicalQuantities import PhysicalQuantity +""" + >>>parseDateTime('2004-08-21 12:00:00+-12h') + (PhysicalQuantity(731814.5,'d'), PhysicalQuantity(0.5,'d')) + >>>parseDateTime('2004-08-21 12:00:00') + (PhysicalQuantity(731814.5,'d'), None) +""" +class TestParseDateTime(unittest.TestCase): + def testWithoutError(self): + self.assertEqual(parseDateTime('2004-08-21 12:00:00+-12h'), + (PhysicalQuantity(731814.5,'d'), PhysicalQuantity(0.5,'d')) + ) + +if __name__ == "__main__": + import sys + if len(sys.argv) == 1: + unittest.main() + else: + suite = unittest.TestLoader().loadTestsFromTestCase(eval(sys.argv[1:][0])) + unittest.TextTestRunner().run(suite) Modified: trunk/src/pyphant/setup.py =================================================================== --- trunk/src/pyphant/setup.py 2009-03-20 12:14:26 UTC (rev 627) +++ trunk/src/pyphant/setup.py 2009-04-02 13:39:35 UTC (rev 628) @@ -19,6 +19,7 @@ license = "BSD", url='http://pyphant.sourceforge.net/', install_requires=['sogl>=0.2.0', + 'egenix-mx-base', ## The following are required, but currently not setuptools enabled. 
#'ScientificPython>=2.6', #'matplotlib>=0.90.1', Modified: trunk/src/workers/fmfile/fmfile/FMFLoader.py =================================================================== --- trunk/src/workers/fmfile/fmfile/FMFLoader.py 2009-03-20 12:14:26 UTC (rev 627) +++ trunk/src/workers/fmfile/fmfile/FMFLoader.py 2009-04-02 13:39:35 UTC (rev 628) @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2008, Rectorate of the University of Freiburg +# Copyright (c) 2008-2009, Rectorate of the University of Freiburg # All rights reserved. # # Redistribution and use in source and binary forms, with or without @@ -43,8 +43,7 @@ from pyphant.core import (Worker, Connectors, Param, DataContainer) from pyphant.quantities.PhysicalQuantities import PhysicalQuantity,isPhysicalUnit,isPhysicalQuantity -from pyphant.quantities.ParseQuantities import parseQuantity, parseVariable, str2unit -import mx.DateTime.ISO +from pyphant.quantities.ParseQuantities import parseQuantity, parseVariable, parseDateTime, str2unit import logging _logger = logging.getLogger("pyphant") @@ -75,6 +74,10 @@ return data def collectAttributes(data): + """Function collectAttributes(data) + data: dictionary referencing the FMF attributes by the respective filenames + returns tupple (dictionary of common attributes, dictionary of varibale attributes) + """ #Collect attributes, define filename as new attribute atts = {u'filename': []} for filename,sc in data.iteritems(): @@ -99,25 +102,77 @@ variableAttr[k]=l return (commonAttr, variableAttr) -def column2FieldContainer(longname, column): - if type(column[0])==type((0,)) and len(column[0])==3: - shortname = column[0][0] - if isPhysicalQuantity(column[0][1]): - unit = column[0][1].unit - field = [row[1].inUnitsOf(unit).value for row in column] +class column2Field: + def __init__(self): + self.Np = 0 + self.Nt = 0 + + def norm(self,datum,unit,error=False): + if isPhysicalQuantity(datum): + try: + return datum.inUnitsOf(unit).value + except: + raise ValueError, "The datum 
%s cannot be expressed in terms of %s." % (datum,unit) + elif error: + return 0.0 else: - unit = 1.0 - field = [row[1] for row in column] - result = DataContainer.FieldContainer(numpy.array(field),unit=PhysicalQuantity(1.0, unit),shortname=shortname,longname=longname) - else: - #Joining lists of strings - if type(column[0]) in (type(''),type(u'')): - for i in xrange(len(column)): - if type(column[i]) == type([]): - column[i] = ','.join(column[i]) - result = DataContainer.FieldContainer(numpy.array(column),longname=longname) - return result + return numpy.NaN + def __call__(self,longname,column): + if type(column[0])==type((0,)): + if len(column[0])==2: + indexDatum = 0 + indexError = 1 + if isPhysicalQuantity(column[0][1]) and column[0][1].isCompatible('s'): + shortname = 't_%i' % self.Nt + self.Nt += 1 + else: + shortname = 'p_%i' % self.Np + self.Np += 1 + for i,element in enumerate(column): + if not type(element)==type((0,)): + column[i]=(numpy.NaN,None) + elif len(column[0])==3: + shortname = column[0][0] + indexDatum = 1 + indexError = 2 + for i,element in enumerate(column): + if not type(element)==type((0,)): + column[i]=(shortname,numpy.NaN,None) + try: + data = [element[indexDatum] for element in column] + except: + print longname,column + import sys + sys.exit(0) + error = [element[indexError] for element in column] + unitCandidates = [element.unit for element in data if isPhysicalQuantity(element)] + if len(unitCandidates) == 0: + unit = 1.0 + else: + unit = unitCandidates[0] + normation = lambda arg: self.norm(arg,unit) + field = numpy.array(map(normation,data)) + ErrorNormation = lambda arg: self.norm(arg,unit,error=True) + result = DataContainer.FieldContainer(field, + error=numpy.array(map(ErrorNormation,error)), + mask = numpy.isnan(field), + unit=PhysicalQuantity(1.0, unit), + shortname=shortname,longname=longname) + else: + #Joining lists of strings + if type(column[0])==type([]): + firstElement = column[0][0] + else: + firstElement = column[0] + 
if type(firstElement) in (type(''),type(u'')): + for i in xrange(len(column)): + if type(column[i]) == type([]): + column[i] = ','.join(column[i]) + result = DataContainer.FieldContainer(numpy.array(column),longname=longname) + return result +column2FieldContainer = column2Field() + def unpackAndCollateFields(variableAttr, data): fieldData = {} dependencies = {} @@ -291,7 +346,7 @@ parseBool, parseQuantity, parseVariable, - lambda d: str(mx.DateTime.ISO.ParseAny(d)) + parseDateTime ] def item2value(section, key): Modified: trunk/src/workers/fmfile/fmfile/tests/TestFMFLoader.py =================================================================== --- trunk/src/workers/fmfile/fmfile/tests/TestFMFLoader.py 2009-03-20 12:14:26 UTC (rev 627) +++ trunk/src/workers/fmfile/fmfile/tests/TestFMFLoader.py 2009-04-02 13:39:35 UTC (rev 628) @@ -1,7 +1,7 @@ #!/usr/bin/env python2.5 # -*- coding: utf-8 -*- -# Copyright (c) 2008, Rectorate of the University of Freiburg +# Copyright (c) 2008-2009 Rectorate of the University of Freiburg # All rights reserved. 
# # Redistribution and use in source and binary forms, with or without @@ -37,8 +37,9 @@ import unittest, numpy from fmfile import FMFLoader +from pyphant.core.DataContainer import FieldContainer,assertEqual +from pyphant.quantities.PhysicalQuantities import PhysicalQuantity - class FieldContainerCondenseDim(unittest.TestCase): def setUp(self): self.x = numpy.linspace(0,0.9,10) @@ -53,14 +54,59 @@ result = FMFLoader.checkAndCondense(self.valid) numpy.testing.assert_array_equal(self.x, result) +class TestColumn2FieldContainer(unittest.TestCase): + def testStrings(self): + column = ['Hello','World'] + result = FMFLoader.column2FieldContainer('simple string',column) + expectedResult = FieldContainer(numpy.array(column),longname='simple string') + assertEqual(result,expectedResult) + def testListofStrings(self): + column = ['World',['Hello', 'World'],'World'] + result = FMFLoader.column2FieldContainer('simple string',column) + expectedResult = FieldContainer(numpy.array(['World','Hello, World','World']),longname='simple string') + assertEqual(result,expectedResult) + + def testListofStrings2(self): + column = [['Hello', 'World'],'World'] + result = FMFLoader.column2FieldContainer('simple string',column) + expectedResult = FieldContainer(numpy.array(['Hello, World','World']),longname='simple string') + assertEqual(result,expectedResult) + + def testVariable(self): + column = [('T',PhysicalQuantity('22.4 degC'),PhysicalQuantity('0.5 degC')), + ('T',PhysicalQuantity('11.2 degC'),PhysicalQuantity('0.5 degC')) + ] + result = FMFLoader.column2FieldContainer('temperature',column) + expectedResult = FieldContainer(numpy.array([22.4,11.2]),error=numpy.array([0.5,0.5]), + mask = numpy.array([False,False]), + unit='1 degC',longname='temperature',shortname='T') + assertEqual(result,expectedResult) + + def testVariableWithNaN(self): + column = [('T',PhysicalQuantity('22.4 degC'),PhysicalQuantity('0.5 degC')), + ('T',PhysicalQuantity('11.2 degC'),None) + ] + result = 
FMFLoader.column2FieldContainer('temperature',column) + expectedResult = FieldContainer(numpy.array([22.4,11.2]),error=numpy.array([0.5,0.0]), + mask = numpy.array([False,False]), + unit='1 degC',longname='temperature',shortname='T') + assertEqual(result,expectedResult) + + def testVariableFirstNaN(self): + column = [('T','NaN',PhysicalQuantity('0.5 degC')), + ('T',PhysicalQuantity('11.2 degC'),None) + ] + result = FMFLoader.column2FieldContainer('temperature',column) + expectedResult = FieldContainer(numpy.array([numpy.NaN,11.2]),error=numpy.array([0.5,0.0]), + mask = numpy.array([True,False]), + unit='1 degC',longname='temperature',shortname='T') + assertEqual(result,expectedResult) + if __name__ == "__main__": - suite = unittest.TestLoader().loadTestsFromTestCase(FieldContainerCondenseDim) - unittest.TextTestRunner().run(suite) - #suite.addTest(unittest.TestLoader().loadTestsFromTestCase(FieldContainerSlicing1dDim)) - #import sys - #if len(sys.argv) == 1: - # unittest.main() - #else: - # suite = unittest.TestLoader().loadTestsFromTestCase(eval(sys.argv[1:][0])) - # unittest.TextTestRunner().run(suite) + import sys + if len(sys.argv) == 1: + unittest.main() + else: + suite = unittest.TestLoader().loadTestsFromTestCase(eval(sys.argv[1:][0])) + unittest.TextTestRunner().run(suite) Modified: trunk/src/workers/fmfile/setup.py =================================================================== --- trunk/src/workers/fmfile/setup.py 2009-03-20 12:14:26 UTC (rev 627) +++ trunk/src/workers/fmfile/setup.py 2009-04-02 13:39:35 UTC (rev 628) @@ -21,8 +21,8 @@ author = __author__, description = __doc__, install_requires=['pyphant>=0.4alpha3', - 'ConfigObj', - 'egenix-mx-base'], + 'ConfigObj' + ] packages = ['fmfile'], entry_points = """ [pyphant.workers] This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-04-02 16:38:46
|
Revision: 629 http://pyphant.svn.sourceforge.net/pyphant/?rev=629&view=rev Author: zklaus Date: 2009-04-02 16:38:39 +0000 (Thu, 02 Apr 2009) Log Message: ----------- Merge branch 'master' into svn-trunk Modified Paths: -------------- trunk/src/pyphant/pyphant/core/FieldContainer.py trunk/src/pyphant/pyphant/core/PyTablesPersister.py trunk/src/workers/ImageProcessing/ImageProcessing/DiffWorker.py trunk/src/workers/fmfile/fmfile/FMFWriter.py trunk/src/workers/fmfile/fmfile/fmfgen.py Added Paths: ----------- trunk/src/pyphant/pyphant/core/KnowledgeManager.py trunk/src/pyphant/pyphant/core/knowledge-ideas.org trunk/src/pyphant/pyphant/tests/TestKnowledgeManager.py Modified: trunk/src/pyphant/pyphant/core/FieldContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/FieldContainer.py 2009-04-02 13:39:35 UTC (rev 628) +++ trunk/src/pyphant/pyphant/core/FieldContainer.py 2009-04-02 16:38:39 UTC (rev 629) @@ -411,7 +411,7 @@ _logger.debug('The errors differ: The error of the second argument is none, while the error of the first argument is %s.' 
% error) return False if not numpy.allclose(scaledError,otherScaledError,rtol,atol): - _logger.debug('The errors differ: %s\n%s' % (scaledError,otherScaledError)) + _logger.debug('The normed errors differ: %s\n%s' % (scaledError,otherScaledError)) return False else: if not data.dtype.char in ['S','U']: @@ -489,7 +489,7 @@ def __sub__(self, other): if isinstance(other, FieldContainer): if self.error!=None or other.error!=None: - return NotImplemented + error = other.error + self.error else: error = None if len(self._dimensions) != len(other.dimensions): Added: trunk/src/pyphant/pyphant/core/KnowledgeManager.py =================================================================== --- trunk/src/pyphant/pyphant/core/KnowledgeManager.py (rev 0) +++ trunk/src/pyphant/pyphant/core/KnowledgeManager.py 2009-04-02 16:38:39 UTC (rev 629) @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2008, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +u""" +knowledge manager + +- retrieve data from local HDF5 files for given emd5s +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source: $ + +from pyphant.core.singletonmixin import Singleton +from pyphant.core.DataContainer import parseId +import pyphant.core.PyTablesPersister as ptp + +from types import TupleType +import urllib +import tables +import os, os.path +import logging + +class KnowledgeManagerException(Exception): + def __init__(self, message, parent_excep=None, *args, **kwds): + super(KnowledgeManagerException, self).__init__(message, *args, **kwds) + self._message = message + self._parent_excep = parent_excep + + #def __repr__(self): + # return message+" (reason: %s)" % (str(self._parent_excep),) + +class KnowledgeManager(Singleton): + + def __init__(self): + super(KnowledgeManager, self).__init__() + self._logger = logging.getLogger("pyphant") + self._refs = {} + + def _retrieveURL(self, url): + # exceptions? + self._logger.info("Retrieving url '%s'..." % (url,)) + localfilename, headers = urllib.urlretrieve(url) + self._logger.info("Using local file '%s'." 
% (localfilename,)) + self._logger.info("Header information: %s", (str(headers),)) + + # + # Save index entries + # + h5 = tables.openFile(localfilename) + # title of 'result_' groups has id in TITLE attribute + dc = None + for group in h5.walkGroups(where="/results"): + id = group._v_attrs.TITLE + if len(id)>0: + self._logger.debug("Registering id '%s'.." % (id,)) + self._refs[id] = (url, localfilename, group._v_pathname) + + return localfilename + + def registerURL(self, url): + localfilename = self._retrieveURL(url) + + def registerDataContainer(self, datacontainer): + try: + assert datacontainer.id is not None + self._refs[datacontainer.id] = datacontainer + except Exception, e: + raise KnowledgeManagerException("Invalid id for DataContainer '" +\ + datacontainer.longname+"'", e) + + #def searchAndRegisterKnowledgeManager(self, host) + # KM =remote object auf host + # registerKnowledgeManager(self, KM) + #def registerKnowledgeManager(self, KM) + + def getDataContainer(self, id, try_cache=True): + if id not in self._refs.keys(): + raise KnowledgeManagerException("Id '%s' unknown." % (id,)) + + ref = self._refs[id] + if isinstance(ref, TupleType): + dc = self._getDCfromURLRef(id, try_cache = try_cache) + else: + dc = ref + + return dc + + def _getDCfromURLRef(self, id, try_cache=True): + url, localfilename, h5path = self._refs[id] + if not try_cache: + os.remove(localfilename) + + if not os.path.exists(localfilename): + localfilename = self._retrieveURL(url) + url, localfilename, h5path = self._refs[id] + + h5 = tables.openFile(localfilename) + + hash, type = parseId(id) + assert type in ['sample','field'] + if type=='sample': + loader = ptp.loadSample + elif type=='field': + loader = ptp.loadField + else: + raise KnowledgeManagerException("Unknown result type '%s'" \ + % (type,)) + try: + self._logger.debug("Loading data from '%s' in file '%s'.." 
% (localfilename, h5path)) + dc = loader(h5, h5.getNode(h5path)) + except Exception, e: + raise KnowledgeManagerException("Id '%s' known, but cannot be read from file '%s'." \ + % (id,localfilename), e) + return dc Modified: trunk/src/pyphant/pyphant/core/PyTablesPersister.py =================================================================== --- trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2009-04-02 13:39:35 UTC (rev 628) +++ trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2009-04-02 16:38:39 UTC (rev 629) @@ -152,29 +152,6 @@ columns.append(saveResult(column,h5)) h5.setNodeAttr(resultGroup, "columns", columns) -def loadSample(h5, resNode): - result = DataContainer.SampleContainer.__new__(DataContainer.SampleContainer) - result.longname = unicode(h5.getNodeAttr(resNode, "longname"), 'utf-8') - result.shortname = unicode(h5.getNodeAttr(resNode, "shortname"), 'utf-8') - result.attributes = {} - for key in resNode._v_attrs._v_attrnamesuser: - if key not in _reservedAttributes: - result.attributes[key]=h5.getNodeAttr(resNode,key) - columns = [] - for resId in h5.getNodeAttr(resNode,"columns"): - nodename = "/results/"+resId - hash, uriType = DataContainer.parseId(h5.getNodeAttr(nodename, "TITLE")) - if uriType == 'sample': - loader = loadSample - elif uriType =='field': - loader = loadField - else: - raise KeyError, "Unknown UriType %s in saving result %s." 
% (uriType, result.id) - columns.append(loader(h5,h5.getNode(nodename))) - result.columns=columns - result.seal(resNode._v_title) - return result - def saveField(h5, resultGroup, result): def dump(inputList): def conversion(arg): @@ -313,4 +290,26 @@ result.seal(resNode._v_title) return result +def loadSample(h5, resNode): + result = DataContainer.SampleContainer.__new__(DataContainer.SampleContainer) + result.longname = unicode(h5.getNodeAttr(resNode, "longname"), 'utf-8') + result.shortname = unicode(h5.getNodeAttr(resNode, "shortname"), 'utf-8') + result.attributes = {} + for key in resNode._v_attrs._v_attrnamesuser: + if key not in _reservedAttributes: + result.attributes[key]=h5.getNodeAttr(resNode,key) + columns = [] + for resId in h5.getNodeAttr(resNode,"columns"): + nodename = "/results/"+resId + hash, uriType = DataContainer.parseId(h5.getNodeAttr(nodename, "TITLE")) + if uriType == 'sample': + loader = loadSample + elif uriType =='field': + loader = loadField + else: + raise KeyError, "Unknown UriType %s in saving result %s." % (uriType, result.id) + columns.append(loader(h5,h5.getNode(nodename))) + result.columns=columns + result.seal(resNode._v_title) + return result Added: trunk/src/pyphant/pyphant/core/knowledge-ideas.org =================================================================== --- trunk/src/pyphant/pyphant/core/knowledge-ideas.org (rev 0) +++ trunk/src/pyphant/pyphant/core/knowledge-ideas.org 2009-04-02 16:38:39 UTC (rev 629) @@ -0,0 +1,29 @@ + + +Ideas: + +* + Before caching an entire file, test whether a KnowledgeManager runs + on the remote side. If yes, ask for the file. + +* Download auf Wunsch erzwingen, falls remote Datei ge\xE4ndert? + Alternativ: KnowledgeManager Cache l\xF6schen lassen, siehe urllib.urlcleanup() + +* Was tun, wenn id nicht auffindbar? + + - anderen KnowledgeManager fragen, der sich + entweder aus der URL ergibt oder + der extra registriert ist + +* Referenzen (Index) innerhalb HDF5-Dateien auffrischen? 
+ +* Dictionaries/Caches + + - id -> Referenzen (URLs) + - Referenzen -> lokale Kopien (urllib macht das schon!) + - id -> (Referenz, Knotenreferenz) + + +* Cache Verzeichnis +Bei Initialisieren des KnowledgeManagers Cache Verzeichnis angeben und +bei urlretreive nutzen Added: trunk/src/pyphant/pyphant/tests/TestKnowledgeManager.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestKnowledgeManager.py (rev 0) +++ trunk/src/pyphant/pyphant/tests/TestKnowledgeManager.py 2009-04-02 16:38:39 UTC (rev 629) @@ -0,0 +1,144 @@ +#!/usr/bin/env python2.5 +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2008, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +u"""Provides unittest classes +""" + +__id__ = "$Id$".replace('$','') +__author__ = "$Author$".replace('$','') +__version__ = "$Revision$".replace('$','') +# $Source$ + +import unittest +import pkg_resources +pkg_resources.require("pyphant") + +from pyphant.core.KnowledgeManager import KnowledgeManager +import pyphant.core.PyTablesPersister as ptp +from pyphant.core.FieldContainer import FieldContainer +import numpy as N +import tables +import urllib + +import tempfile +import os +import logging + + +class KnowledgeManagerTestCase(unittest.TestCase): + def setUp(self): + a = N.array([0, 1, 2, 3]) + self._fc = FieldContainer(a) + self._fc.seal() + + def testGetLocalFile(self): + + h5fileid, h5name = tempfile.mkstemp(suffix='.h5',prefix='test-') + os.close(h5fileid) + + h5 = tables.openFile(h5name,'w') + resultsGroup = h5.createGroup("/", "results") + ptp.saveResult(self._fc, h5) + h5.close() + + km = KnowledgeManager.getInstance() + + km.registerURL('file://'+h5name) + + km_fc = km.getDataContainer(self._fc.id) + + self.assertEqual(self._fc, km_fc) + + os.remove(h5name) + + def testGetHTTPFile(self): + + host = "omnibus.uni-freiburg.de" + remote_dir = "/~mr78/pyphant-test" + url = "http://"+host+remote_dir+"/knowledgemanager-http-test.h5" + + # Get remote file and load DataContainer + filename, headers = urllib.urlretrieve(url) + h5 = tables.openFile(filename) + for g in h5.walkGroups("/results"): + if (len(g._v_attrs.TITLE)>0) \ + and 
(r"\Psi" in g._v_attrs.shortname): + http_fc = ptp.loadField(h5,g) + + km = KnowledgeManager.getInstance() + + km.registerURL(url) + + km_fc = km.getDataContainer(http_fc.id) + + self.assertEqual(http_fc, km_fc) + + os.remove(filename) + + def testGetDataContainer(self): + km = KnowledgeManager.getInstance() + + km.registerDataContainer(self._fc) + + km_fc = km.getDataContainer(self._fc.id) + + self.assertEqual(self._fc, km_fc) + + def testExceptions(self): + km = KnowledgeManager.getInstance() + + #invalid id + + #DataContainer not sealed + + #Local file not readable + + #Register empty hdf + +if __name__ == "__main__": + import sys + + logger = logging.getLogger('pyphant') + + + hdlr = logging.StreamHandler(sys.stderr) + formatter = logging.Formatter('[%(name)s|%(levelname)s] %(message)s') + hdlr.setFormatter(formatter) + + logger.addHandler(hdlr) + logger.setLevel(logging.DEBUG) + + if len(sys.argv) == 1: + unittest.main() + else: + suite = unittest.TestLoader().loadTestsFromTestCase(eval(sys.argv[1:][0])) + unittest.TextTestRunner().run(suite) Modified: trunk/src/workers/ImageProcessing/ImageProcessing/DiffWorker.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/DiffWorker.py 2009-04-02 13:39:35 UTC (rev 628) +++ trunk/src/workers/ImageProcessing/ImageProcessing/DiffWorker.py 2009-04-02 16:38:39 UTC (rev 629) @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2006-2007, Rectorate of the University of Freiburg +# Copyright (c) 2006-2009, Rectorate of the University of Freiburg # All rights reserved. 
# # Redistribution and use in source and binary forms, with or without @@ -45,10 +45,13 @@ class DiffWorker(Worker.Worker): API = 2 - VERSION = 1 + VERSION = 2 REVISION = "$Revision$"[11:-1] - name="Diff Worker" - _params = [("absolute", u"Return absolute of difference: ", [u"Yes", u"No"], None)] + name=u"Diff Worker" + _params = [("absolute", u"Return absolute of difference: ", [u"Yes", u"No"], None), + ("longname",u"Name of result",'default',None), + ("symbol",u"Symbol of result",'default',None)] + _sockets = [ ("image1", Connectors.TYPE_IMAGE), ("image2", Connectors.TYPE_IMAGE)] @@ -57,5 +60,9 @@ result = image1 - image2 if self.paramAbsolute.value==u"Yes": result.data = numpy.abs(result.data) + if self.paramLongname.value != 'default': + result.longname = self.paramLongname.value + if self.paramSymbol.value != 'default': + result.shortname = self.paramSymbol.value result.seal() return result Modified: trunk/src/workers/fmfile/fmfile/FMFWriter.py =================================================================== --- trunk/src/workers/fmfile/fmfile/FMFWriter.py 2009-04-02 13:39:35 UTC (rev 628) +++ trunk/src/workers/fmfile/fmfile/FMFWriter.py 2009-04-02 16:38:39 UTC (rev 629) @@ -68,13 +68,25 @@ fc.add_reference_item('author', USER) if len(fieldContainer.data.shape)==1: dim = fieldContainer.dimensions[0] - data = numpy.vstack([dim.data, fieldContainer.data]) + if fieldContainer.error == None: + data = numpy.vstack([dim.data, fieldContainer.data]) + else: + data = numpy.vstack([dim.data, fieldContainer.data,fieldContainer.error]) tab = factory.gen_table(data.transpose()) tab.add_column_def(dim.longname, dim.shortname, str(dim.unit)) + if fieldContainer.error == None: + errorSymbol = None + else: + errorSymbol = u"\\Delta_{%s}" % fieldContainer.shortname tab.add_column_def(fieldContainer.longname, fieldContainer.shortname, str(fieldContainer.unit), - dependencies = [dim.shortname]) + dependencies = [dim.shortname], + error = errorSymbol) + if fieldContainer.error 
!= None: + tab.add_column_def(u"encertainty of %s" % fieldContainer.longname, + errorSymbol, + str(fieldContainer.unit)) elif fieldContainer.dimensions[0].isIndex(): dim = fieldContainer.dimensions[-1] try: Modified: trunk/src/workers/fmfile/fmfile/fmfgen.py =================================================================== --- trunk/src/workers/fmfile/fmfile/fmfgen.py 2009-04-02 13:39:35 UTC (rev 628) +++ trunk/src/workers/fmfile/fmfile/fmfgen.py 2009-04-02 16:38:39 UTC (rev 629) @@ -222,7 +222,7 @@ class _ColumnDef(_Item): def __init__(self, longname, shortname, unit=None, - deps=[], format=DEFAULT_COL_FORMAT, *args, **kwds): + deps=[], error = None,format=DEFAULT_COL_FORMAT, *args, **kwds): """Column definition as item for a data definition section. longname -- long name for the column, human-readable @@ -230,6 +230,7 @@ unit -- unit like 'm/s' (optional, default: None) deps -- list of dependencies, which are themselves short names of other columns (optional, default: []) + error -- Reference to the column symbol listing the respective error format -- format specifier used for column values (optional) The instances given for 'longname', 'shortname', 'deps' items and 'unit' @@ -244,17 +245,21 @@ value += deps[-1]+RPAREN_DEPEND if unit is not None: value += u" [%s]" % (unit,) + if error is not None: + value += u" +- %s" % (error,) super(_ColumnDef,self).__init__( longname, value, *args, **kwds) self._shortname = shortname self._unit = unit self._deps = deps + self._error = error self._format = format longname = _AutoProperty('_metatag') shortname = _AutoProperty('_shortname') unit = _AutoProperty('_unit') deps = _AutoProperty('_deps') + error = _AutoProperty('_error') format = _AutoProperty('_format') class _Section(_FMFElement): @@ -400,6 +405,7 @@ % (self._shortname,)) def add_column_def(self, longname, shortname, unit=None, dependencies=[], + error = None, format=DEFAULT_COL_FORMAT, arg_coding=None): # , data=None): """Add a column definition to the table. 
@@ -417,7 +423,7 @@ # data -- sequence with references to data items; length must be # equal to current number of rows in the table; data # is joined to the current table (optional) - coldef = self._factory._gen_column_def(longname, shortname, unit, dependencies, + coldef = self._factory._gen_column_def(longname, shortname, unit, dependencies,error, format, arg_coding) self._coldefs.append(coldef) #if not data is None: @@ -635,7 +641,7 @@ kwds.update(self._element_kwds) # mix in the coding and line marker information return _Item(metatag, value, *args, **kwds) - def _gen_column_def(self, longname, shortname, unit=None, dependencies=[], + def _gen_column_def(self, longname, shortname, unit=None, dependencies=[], error = None, format=DEFAULT_COL_FORMAT, arg_coding=None, *args, **kwds): """Generate a column definition. @@ -664,7 +670,7 @@ for i,d in enumerate(dependencies): dependencies[i] = assure_unicode(d, arg_coding) kwds.update(self._element_kwds) # mix in the coding and line marker information - return _ColumnDef(longname, shortname, unit, dependencies, format, *args, **kwds) + return _ColumnDef(longname, shortname, unit, dependencies, error, format, *args, **kwds) def gen_section(self, tag, arg_coding=None, *args, **kwds): """Generate a section for latter inclusion in a FMF file. This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-04-03 12:48:47
|
Revision: 632 http://pyphant.svn.sourceforge.net/pyphant/?rev=632&view=rev Author: zklaus Date: 2009-04-03 12:48:38 +0000 (Fri, 03 Apr 2009) Log Message: ----------- Merge branch 'master' into svn-trunk * master: Enh: Attributes of FieldContainers are printed to the Full Metdata Format viewer BugFix: Correct handling of attributes Longname and symbol of MRA worker can be set by the user. Corrected syntax error in setup.py of fmfile module. Corrected typo in FMFWriter module. Modified Paths: -------------- trunk/src/pyphant/pyphant/core/DataContainer.py trunk/src/workers/OSC/OSC/MRA.py trunk/src/workers/OSC/OSC/OscAbsorption.py trunk/src/workers/fmfile/fmfile/FMFLoader.py trunk/src/workers/fmfile/fmfile/FMFWriter.py trunk/src/workers/fmfile/setup.py Property Changed: ---------------- trunk/src/workers/fmfile/fmfile/FMFWriter.py Modified: trunk/src/pyphant/pyphant/core/DataContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/DataContainer.py 2009-04-03 12:48:08 UTC (rev 631) +++ trunk/src/pyphant/pyphant/core/DataContainer.py 2009-04-03 12:48:38 UTC (rev 632) @@ -120,7 +120,7 @@ def __init__(self, longname, shortname, attributes=None): self.longname = longname self.shortname = shortname - if attributes: + if type(attributes) == type({}): self.attributes = attributes else: self.attributes = {} Modified: trunk/src/workers/OSC/OSC/MRA.py =================================================================== --- trunk/src/workers/OSC/OSC/MRA.py 2009-04-03 12:48:08 UTC (rev 631) +++ trunk/src/workers/OSC/OSC/MRA.py 2009-04-03 12:48:38 UTC (rev 632) @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2008, Rectorate of the University of Freiburg +# Copyright (c) 2008-2009, Rectorate of the University of Freiburg # All rights reserved. 
# # Redistribution and use in source and binary forms, with or without @@ -111,12 +111,14 @@ class MRA(Worker.Worker): API = 2 - VERSION = 1 + VERSION = 2 REVISION = "$Revision$"[11:-1] name = "Multi Resolution Analyser" _sockets = [("field", Connectors.TYPE_IMAGE)] - _params = [("scale", u"Scale", "200 nm", None)] + _params = [("scale", u"Scale", "200 nm", None), + ("longname",u"Name of result",'default',None), + ("symbol",u"Symbol of result",'default',None)] @Worker.plug(Connectors.TYPE_IMAGE) def mra(self, field, subscriber=0): @@ -160,6 +162,10 @@ mask = numpy.isnan(pos).transpose(), longname="%s of the local %s of %s" % (dim.longname,"minima",field.longname), shortname="%s_0" % dim.shortname) + if self.paramLongname.value != 'default': + roots.longname = self.paramLongname.value + if self.paramSymbol.value != 'default': + roots.shortname = self.paramSymbol.value roots.seal() return roots Modified: trunk/src/workers/OSC/OSC/OscAbsorption.py =================================================================== --- trunk/src/workers/OSC/OSC/OscAbsorption.py 2009-04-03 12:48:08 UTC (rev 631) +++ trunk/src/workers/OSC/OSC/OscAbsorption.py 2009-04-03 12:48:38 UTC (rev 632) @@ -120,7 +120,7 @@ def extract(self, osc, subscriber=0): col = osc[self.paramColumn.value] if self.paramIndex.value=='All': - result = col + result = copy.deepcopy(col) else: index = int(self.paramIndex.value) if len(col.dimensions)>1: @@ -137,6 +137,7 @@ dimensions = [dim], longname=col.longname, shortname=col.shortname) + result.attributes = osc.attributes result.seal() return result Modified: trunk/src/workers/fmfile/fmfile/FMFLoader.py =================================================================== --- trunk/src/workers/fmfile/fmfile/FMFLoader.py 2009-04-03 12:48:08 UTC (rev 631) +++ trunk/src/workers/fmfile/fmfile/FMFLoader.py 2009-04-03 12:48:38 UTC (rev 632) @@ -236,7 +236,9 @@ newField.dimensions[dim]=independentFields[indepField] assert newField.isValid() containers.append(newField) - 
return DataContainer.SampleContainer(containers,attributes=commonAttr) + result = DataContainer.SampleContainer(containers,attributes=commonAttr) + print "FMFLoader",result.attributes + return result def reshapeField(field): if field.isIndependent(): Modified: trunk/src/workers/fmfile/fmfile/FMFWriter.py =================================================================== --- trunk/src/workers/fmfile/fmfile/FMFWriter.py 2009-04-03 12:48:08 UTC (rev 631) +++ trunk/src/workers/fmfile/fmfile/FMFWriter.py 2009-04-03 12:48:38 UTC (rev 632) @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2008, Rectorate of the University of Freiburg +# Copyright (c) 2008-2009, Rectorate of the University of Freiburg # All rights reserved. # # Redistribution and use in source and binary forms, with or without @@ -39,7 +39,7 @@ enc=lambda s: unicode(s, "utf-8") -import platform,os +import platform,os,socket,datetime pltform=platform.system() if pltform=='Linux' or pltform=='Darwin': USER=enc(os.environ['LOGNAME']) @@ -66,6 +66,17 @@ factory = fmfgen.gen_factory(out_coding='utf-8', eol='\n') fc = factory.gen_fmf() fc.add_reference_item('author', USER) + fc.add_reference_item('title',fieldContainer.longname) + fc.add_reference_item('place',socket.getfqdn()) + fc.add_reference_item('created',datetime.datetime.utcnow().isoformat()) + sec = factory.gen_section("parameters") + for key,value in fieldContainer.attributes.iteritems(): + if type(value)==type([]): + output = ' '.join(value) + else: + output = str(value) + sec.add_item(key,output) + fc.add_section(sec) if len(fieldContainer.data.shape)==1: dim = fieldContainer.dimensions[0] if fieldContainer.error == None: @@ -84,7 +95,7 @@ dependencies = [dim.shortname], error = errorSymbol) if fieldContainer.error != None: - tab.add_column_def(u"encertainty of %s" % fieldContainer.longname, + tab.add_column_def(u"uncertainty of %s" % fieldContainer.longname, errorSymbol, str(fieldContainer.unit)) elif fieldContainer.dimensions[0].isIndex(): 
@@ -111,13 +122,45 @@ return str(fc) import wx +ID_EXIT = 102 +class FMFframe(wx.Frame): + def __init__(self,parent, ID, title): + wx.Frame.__init__(self,parent,ID, title, + wx.DefaultPosition,wx.Size(300,300)) + self.CreateStatusBar() + self.SetStatusText("Full-Metadata Format") + p = wx.Panel(self) + menuBar = wx.MenuBar() + menu = wx.Menu() + menu.Append(101, "&About", + "Full-Metadata Format Viewer") + menu.AppendSeparator() + menu.Append(ID_EXIT,"E&xit","Terminate the program") + menuBar.Append(menu,"&File") + self.SetMenuBar(menuBar) + + wx.EVT_MENU(self,ID_EXIT, self.timeToQuit) + + def timeToQuit(self,event): + self.Close(True) + class TextFrame(wx.Frame): def __init__(self,fmf): wx.Frame.__init__(self,None,-1,'FMFWriter', size=(300,200)) multiText = wx.TextCtrl(self,-1,fmf,size=(200,200),style=wx.TE_MULTILINE) multiText.SetInsertionPoint(0) +class MyApp(wx.App): + def OnInit(self): + frame = FMFframe(None,-1,"Pyphant Full-Metadata Format Viewer") + frame.Show(True) + return True + + def OnExit(self): + self.ExitMainLoop() + wx.Exit() + class FMFWriter(object): name='FMF Writer' def __init__(self, fieldContainer,show=True): Property changes on: trunk/src/workers/fmfile/fmfile/FMFWriter.py ___________________________________________________________________ Added: svn:executable + * Modified: trunk/src/workers/fmfile/setup.py =================================================================== --- trunk/src/workers/fmfile/setup.py 2009-04-03 12:48:08 UTC (rev 631) +++ trunk/src/workers/fmfile/setup.py 2009-04-03 12:48:38 UTC (rev 632) @@ -22,7 +22,7 @@ description = __doc__, install_requires=['pyphant>=0.4alpha3', 'ConfigObj' - ] + ], packages = ['fmfile'], entry_points = """ [pyphant.workers] This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-04-03 14:34:06
|
Revision: 633 http://pyphant.svn.sourceforge.net/pyphant/?rev=633&view=rev Author: zklaus Date: 2009-04-03 14:33:58 +0000 (Fri, 03 Apr 2009) Log Message: ----------- Merge branch 'master' into svn-trunk * master: Add: Added doc strings. Bugfix: Loading of FMF-Zipfiles preserves order of loaded files while handling the attributes. Renamed Pyphant window to "Pyphant Sprint" Bugfix: Method _getDataContainerURL is marked to be private (with underscore). Bugfix: Server ID is only generated once (at generation of knowledge manager) Cosmetics: Distinction of DataContainers and KnowledgeManagers in variable names. Modified Paths: -------------- trunk/src/pyphant/pyphant/core/KnowledgeManager.py trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py trunk/src/workers/fmfile/fmfile/FMFLoader.py Modified: trunk/src/pyphant/pyphant/core/KnowledgeManager.py =================================================================== --- trunk/src/pyphant/pyphant/core/KnowledgeManager.py 2009-04-03 12:48:38 UTC (rev 632) +++ trunk/src/pyphant/pyphant/core/KnowledgeManager.py 2009-04-03 14:33:58 UTC (rev 633) @@ -30,10 +30,58 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. u""" -knowledge manager +Knowledge Manager for Pyphant +============================= -- retrieve data from local HDF5 files for given emd5s -- ... +The ID of a DataContainer object is given by a emd5 string. + +Responsibilities: +----------------- + + - register HDF5 files by their URLs + - register remote knowledge managers by urls + - share data containers via HTTP, they are requested by id + - get references for these data containers (local or remote) + +If an operation fails, a KnowledgeManagerException +will be raised. These exceptions have a method + + .getParentException() + +in order to get additional information about the reason. 
+ +Usage: +------ + + Get a reference to the KnowledgeManager instance, which is a + singleton: + + import pyphant.core.KnowledgeManager as KM + km = KM.KnowledgeManager.getInstance() + + Optionally: Start HTTP server for sharing data with others by + + km.startServer(<host>,<port>) + + Register a local HDF5 file: + + km.registerURL("file://tmp/data.h5") + + Register a remote HDF5 file: + + km.registerURL("http://example.com/repository/data.h5") + + Register another KnowledgeManager in order to benefit + from their knowledge (see arguments of .startServer): + + km.registerKnowledgeManager("http://example.com:8000") + + Request data container by its id: + + dc = km.getDataContainer(id) + + Use the data container! + """ __id__ = "$Id$" @@ -78,6 +126,9 @@ def __str__(self): return self._message+" (reason: %s)" % (str(self._parent_excep),) + def getParentException(self): + return self._parent_excep + class KnowledgeManager(Singleton): def __init__(self): @@ -86,6 +137,7 @@ self._refs = {} self._remoteKMs = {} # key:id, value:url self._server = None + self._server_id = uuid1() def __del__(self): if self.isServerRunning(): @@ -96,20 +148,26 @@ return None return "http://%s:%d" % (self._http_host, self._http_port) + def getServerId(self): + """Return uniqe id of the KnowledgeManager. + """ + return self._server_id + def startServer(self, host, port): - """Start HTTP server. + """Start the HTTP server. host -- full qualified domain name or IP address under which server can be contacted via HTTP - port -- port of HTTP server + port -- port of HTTP server (integer) + A temporary directory is generated in order to + save temporary HDF5 files. The data may be announced to other KnowledgeManagers. 
""" self._http_host = host self._http_port = port self._http_dir = tempfile.mkdtemp(prefix='pyphant-knowledgemanager') self._server = _HTTPServer((host,port),_HTTPRequestHandler) - self._server_id = uuid1() class _HTTPServerThread(threading.Thread): def run(other): @@ -122,7 +180,11 @@ def stopServer(self): + """Stop the HTTP server. + The temporary directory is removed. + """ + logger = self._logger if self.isServerRunning(): self._server.stop_server = True @@ -152,29 +214,52 @@ self._logger.warn("HTTP server should be stopped but isn't running.") def isServerRunning(self): + """Return whether HTTP server is running.""" return self._server is not None - def registerKnowledgeManager(self, url): + def registerKnowledgeManager(self, km_url): + """Register a knowledge manager. + + km_url -- url where another KnowledgeManager can be contacted, + form: http://<hostname>:<port> + + The remote KnowledgeManager is contacted immediately in order + to save its unique ID. + """ logger = self._logger try: # get unique id from KM via HTTP - logger.debug("Requesting ID from Knowledgemanager with URL '%s'...", url) + logger.debug("Requesting ID from Knowledgemanager with URL '%s'...", km_url) # request url for given id over http dummy_data = urllib.urlencode({'dummykey':'dummyvalue'}) - answer = urllib.urlopen(url+HTTP_REQUEST_KM_ID_PATH, dummy_data) + answer = urllib.urlopen(km_url+HTTP_REQUEST_KM_ID_PATH, dummy_data) logger.debug("Info from HTTP answer: %s", answer.info()) - id = answer.readline().strip() - logger.debug("ID read from HTTP answer: %s", id) + km_id = answer.readline().strip() + logger.debug("KM ID read from HTTP answer: %s", km_id) except Exception, e: raise KnowledgeManagerException( - "Couldn't get ID for knowledge manager under URL %s." % (url,),e) + "Couldn't get ID for knowledge manager under URL %s." 
% (km_url,),e) - self._remoteKMs[id] = url + self._remoteKMs[km_id] = km_url def registerURL(self, url): + """Register an HDF5 file downloadable from given URL. + + url -- URL of the HDF5 file + + The HDF5 file is downloaded and all DataContainers + in the file are registered with their identifiers. + """ self._retrieveURL(url) def registerDataContainer(self, datacontainer): + """Register a DataContainer located in memory using a given reference. + + datacontainer -- reference to the DataContainer object + + The DataContainer must have an .id attribute, + which could be generated by the datacontainer.seal() method. + """ try: assert datacontainer.id is not None self._refs[datacontainer.id] = datacontainer @@ -184,7 +269,14 @@ def _retrieveURL(self, url): + """Retrieve HDF5 file from a given URL. + url -- URL of the HDF5 file + + The HDF5 file is downloaded and all DataContainers + in the file are registered with their identifiers. + """ + self._logger.info("Retrieving url '%s'..." % (url,)) localfilename, headers = urllib.urlretrieve(url) self._logger.info("Using local file '%s'." % (localfilename,)) @@ -197,23 +289,32 @@ # title of 'result_' groups has id in TITLE attribute dc = None for group in h5.walkGroups(where="/results"): - id = group._v_attrs.TITLE - if len(id)>0: - self._logger.debug("Registering id '%s'.." % (id,)) - self._refs[id] = (url, localfilename, group._v_pathname) + dc_id = group._v_attrs.TITLE + if len(dc_id)>0: + self._logger.debug("Registering DC ID '%s'.." % (dc_id,)) + self._refs[dc_id] = (url, localfilename, group._v_pathname) h5.close() - def _retrieveRemoteKMs(self, id, omit_km_ids): - id_url = self._getURLFromRemoteKMs(id, omit_km_ids) - if id_url is None: + def _retrieveRemoteKMs(self, dc_id, omit_km_ids): + """Retrieve datacontainer by its id from remote KnowledgeManagers. 
+ + dc_id -- unique id of the requested DataContainer + """ + dc_url = self._getURLFromRemoteKMs(dc_id, omit_km_ids) + if dc_url is None: raise KnowledgeManagerException( - "Couldn't retrieve ID '%s' from remote knowledgemanagers" % (id,)) + "Couldn't retrieve DC ID '%s' from remote knowledgemanagers" % (dc_id,)) else: - self._retrieveURL(id_url) + self._retrieveURL(dc_url) - def _getURLFromRemoteKMs(self, id, omit_km_ids): + def _getURLFromRemoteKMs(self, dc_id, omit_km_ids): + """Return URL for a DataContainer by requesting remote KnowledgeManagers. + dc_id -- ID of the requested DataContainer + omit_km_ids -- list of KnowledgeManager IDs which shouldn't be + asked + """ logger = self._logger # # build query for http request with @@ -221,7 +322,7 @@ # list of URLs which should not be requested by # the remote side # - query = { 'id': id} + query = { 'dcid': dc_id} idx = -1 # needed if omit_km_ids is empty for idx,km_id in enumerate(omit_km_ids): query['kmid%d' % (idx,)] = km_id @@ -234,9 +335,9 @@ # # ask every remote KnowledgeManager for id # - logger.debug("Requesting knowledge managers for id '%s'..." % (id,)) + logger.debug("Requesting knowledge managers for DC id '%s'..." % (dc_id,)) found = False - dc_id_url = None + dc_url = None for km_id, km_url in self._remoteKMs.iteritems(): if not (found or (km_id in omit_km_ids)): logger.debug( @@ -249,21 +350,31 @@ logger.debug("Info from HTTP answer: %s", answer.info()) found = not tmp.startswith("Failed") # TODO: check for code 404 instead! if found: - dc_id_url = tmp - logger.debug("URL for id read from HTTP answer: %s", dc_id_url) + dc_url = tmp + logger.debug("URL for id read from HTTP answer: %s", dc_url) else: # message for everyone: do not ask this KM again idx += 1 query['kmid%d' % (idx),] = km_id - return dc_id_url + return dc_url - def getDataContainerURL(self, id, omit_km_ids=[]): + def _getDataContainerURL(self, dc_id, omit_km_ids=[]): + """Return a URL from which a DataContainer can be downloaded. 
- if id in self._refs.keys(): - dc = self.getDataContainer(id, omit_km_ids=omit_km_ids) + dc_id -- ID of requested DataContainer + omit_km_ids -- list of KnowledgeManager IDs which shouldn't be + asked (Default: []) + The DataContainer can be downloaded as HDF5 file. + The server must be running before calling this method. + """ + assert self.isServerRunning(), "Server is not running." + + if dc_id in self._refs.keys(): + dc = self.getDataContainer(dc_id, omit_km_ids=omit_km_ids) + # # Wrap data container in temporary HDF5 file # @@ -276,55 +387,80 @@ resultsGroup = h5.createGroup("/", "results") ptp.saveResult(dc, h5) h5.close() - url = self._getServerURL()+"/"+os.path.basename(h5name) + dc_url = self._getServerURL()+"/"+os.path.basename(h5name) else: try: - url = self._getURLFromRemoteKMs(id, omit_km_ids) + dc_url = self._getURLFromRemoteKMs(dc_id, omit_km_ids) except Exception, e: raise KnowledgeManagerException( - "URL for ID '%s' not found." % (id,), e) - return url + "URL for DC ID '%s' not found." % (dc_id,), e) + return dc_url - def getDataContainer(self, id, try_cache=True, omit_km_ids=[]): - if id not in self._refs.keys(): - # raise KnowledgeManagerException("Id '%s'unknown."%(id,)) + def getDataContainer(self, dc_id, try_cache=True, omit_km_ids=[]): + """Request reference on DataContainer having the given id. + + dc_id -- Unique ID of the DataContainer + try_cache -- Try local cache first (default: True) + omit_km_ids -- list of KnowledgeManager IDs which shouldn't be + asked (Default: []) + """ + if dc_id not in self._refs.keys(): + # raise KnowledgeManagerException("DC ID '%s'unknown."%(dc_id,)) try: - self._retrieveRemoteKMs(id, omit_km_ids) + self._retrieveRemoteKMs(dc_id, omit_km_ids) except Exception, e: raise KnowledgeManagerException( - "Id '%s' unknown." % (id,), e) + "DC ID '%s' unknown." 
% (dc_id,), e) - ref = self._refs[id] + ref = self._refs[dc_id] if isinstance(ref, TupleType): - dc = self._getDCfromURLRef(id, try_cache = try_cache) + dc = self._getDCfromURLRef(dc_id, try_cache = try_cache) else: dc = ref return dc - def _getDCfromURLRef(self, id, try_cache=True, omit_km_ids=[]): - url, localfilename, h5path = self._refs[id] + def _getDCfromURLRef(self, dc_id, try_cache=True, omit_km_ids=[]): + """Return DataContainer. + + dc_id -- Unique ID of the DataContainer + try_cache -- Try local cache first (default: True) + omit_km_ids -- list of KnowledgeManager IDs which shouldn't be + asked (Default: []) + + The following request order is used: + + 1. Use local cache file, if available (only for try_cache=True) + 2. Try to download HDF5 file (again). + 3. Ask remote KnowledgeManagers for the given ID. + Download the DataContainer as HDF5 file (if available). + + Afterwards open the file and extract the DataContainer. + The given dc_id must be known to the KnowledgeManager. 
+ """ + dc_url, localfilename, h5path = self._refs[dc_id] if not try_cache: os.remove(localfilename) if not os.path.exists(localfilename): try: # download URL and save ids as references - self._retrieveURL(url) + self._retrieveURL(dc_url) except Exception, e_url: try: - self._retrieveRemoteKMs(id, omit_km_ids) + self._retrieveRemoteKMs(dc_id, omit_km_ids) except Exception, e_rem: raise KnowledgeManagerException( - "Id '%s' not found on remote sites."%(id,), + "DC ID '%s' not found on remote sites."% (dc_id,), KnowledgeManagerException( - "Id '%s' could not be resolved using URL '%s'"%(id, url)), e_url) + "DC ID '%s' could not be resolved using URL '%s'" \ + % (dc_id, dc_url)), e_url) - url, localfilename, h5path = self._refs[id] + dc_url, localfilename, h5path = self._refs[dc_id] h5 = tables.openFile(localfilename) - hash, type = parseId(id) + hash, type = parseId(dc_id) assert type in ['sample','field'] if type=='sample': loader = ptp.loadSample @@ -337,8 +473,9 @@ self._logger.debug("Loading data from '%s' in file '%s'.." % (localfilename, h5path)) dc = loader(h5, h5.getNode(h5path)) except Exception, e: - raise KnowledgeManagerException("Id '%s' known, but cannot be read from file '%s'." \ - % (id,localfilename), e) + raise KnowledgeManagerException( + "DC ID '%s' known, but cannot be read from file '%s'." 
\ + % (dc_id,localfilename), e) finally: h5.close() return dc @@ -368,6 +505,7 @@ def _do_POST_request_km_id(self): + """Return the KnowledgeManager ID.""" km = _HTTPRequestHandler._knowledge_manager code = 200 @@ -376,26 +514,27 @@ return code, answer def _do_POST_request_dc_url(self): + """Return a URL for a given DataContainer ID.""" if self.headers.has_key('content-length'): length= int( self.headers['content-length'] ) query = self.rfile.read(length) query_dict = cgi.parse_qs(query) - id = query_dict['id'][0] + dc_id = query_dict['dcid'][0] omit_km_ids = [ value[0] for (key,value) in query_dict.iteritems() - if key!='id'] - self._logger.debug("Query data: id: %s, omit_km_ids: %s", - id, omit_km_ids) + if key!='dcid'] + self._logger.debug("Query data: dc_id: %s, omit_km_ids: %s", + dc_id, omit_km_ids) try: km = _HTTPRequestHandler._knowledge_manager code = 200 - answer = km.getDataContainerURL(id, omit_km_ids) + answer = km._getDataContainerURL(dc_id, omit_km_ids) self._logger.debug("Returning URL '%s'...", answer) except Exception, e: self._logger.warn("Catched exception: %s", traceback.format_exc()) code = 404 - answer = "Failed: Id '%s' not found." % (id,) # 'Failed' significant! + answer = "Failed: DC ID '%s' not found." % (dc_id,) # 'Failed' significant! else: code = 404 answer = "Cannot interpret query." @@ -403,7 +542,8 @@ return code, answer def do_GET(self): - """Serve a GET request.""" + """Return a requested HDF5 from temporary directory. + """ log = self._logger f = self.send_head() if f: @@ -418,7 +558,8 @@ def send_head(self): # see SimpleHTTPServer.SimpleHTTPRequestHandler - + """Send header for HDF5 file request. + """ log = self._logger km = _HTTPRequestHandler._knowledge_manager @@ -445,7 +586,8 @@ class _HTTPServer(ThreadingMixIn,HTTPServer): - + """Threaded HTTP Server for the KnowledgeManager. 
+ """ stop_server = False _logger = logging.getLogger("pyphant") Modified: trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py =================================================================== --- trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py 2009-04-03 12:48:38 UTC (rev 632) +++ trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py 2009-04-03 14:33:58 UTC (rev 633) @@ -34,7 +34,7 @@ __id__ = "$Id$" __author__ = "$Author$" -__version__ = "$Revision$" +__version__ = "Sprint" # $Source$ import os, os.path, pkg_resources Modified: trunk/src/workers/fmfile/fmfile/FMFLoader.py =================================================================== --- trunk/src/workers/fmfile/fmfile/FMFLoader.py 2009-04-03 12:48:38 UTC (rev 632) +++ trunk/src/workers/fmfile/fmfile/FMFLoader.py 2009-04-03 14:33:58 UTC (rev 633) @@ -60,6 +60,7 @@ def loadDataFromZip(filename, subscriber=1): z = zipfile.ZipFile(filename, 'r') names = z.namelist() + names.sort() total = len(names) assert total>0, "The loaded FMF archive named %s does not contain any files." 
% filename data = {} @@ -71,17 +72,17 @@ else: data[pixelName] = rawContainer subscriber %= float(i+1)/total*100.0 - return data + return data,names -def collectAttributes(data): +def collectAttributes(data,names): """Function collectAttributes(data) data: dictionary referencing the FMF attributes by the respective filenames returns tupple (dictionary of common attributes, dictionary of varibale attributes) """ #Collect attributes, define filename as new attribute - atts = {u'filename': []} - for filename,sc in data.iteritems(): - atts['filename'].append(filename) + atts = {u'filename': names} + for filename in names: + sc = data[filename] for section,sectionDict in sc.attributes.iteritems(): for key,treetoken in sectionDict.iteritems(): attlist = atts.setdefault(key, []) @@ -205,8 +206,8 @@ return reference def readZipFile(filename, subscriber=1): - data = loadDataFromZip(filename, subscriber) - commonAttr, variableAttr = collectAttributes(data) + data,names = loadDataFromZip(filename, subscriber) + commonAttr, variableAttr = collectAttributes(data,names) #Wrap variable attributes into FieldContainer containers = [ column2FieldContainer(longname, column) for longname, column in variableAttr.iteritems()] #Process SampleContainers of parsed FMF files and skip independent variables, which are used as dimensions. @@ -237,7 +238,6 @@ assert newField.isValid() containers.append(newField) result = DataContainer.SampleContainer(containers,attributes=commonAttr) - print "FMFLoader",result.attributes return result def reshapeField(field): This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-04-20 19:55:54
|
Revision: 637 http://pyphant.svn.sourceforge.net/pyphant/?rev=637&view=rev Author: zklaus Date: 2009-04-20 19:55:50 +0000 (Mon, 20 Apr 2009) Log Message: ----------- Merge branch 'master' into svn-trunk * master: Bugfix: Integrate tools tests package. Enh: Added TestEmd5Src. Enh: Removed use of deprecated module md5 Modified Paths: -------------- trunk/src/pyphant/pyphant/core/DataContainer.py trunk/src/pyphant/pyphant/core/FieldContainer.py trunk/src/workers/tools/setup.py Added Paths: ----------- trunk/src/workers/tools/tools/tests/ trunk/src/workers/tools/tools/tests/TestEmd5Source.py trunk/src/workers/tools/tools/tests/__init__.py Modified: trunk/src/pyphant/pyphant/core/DataContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/DataContainer.py 2009-04-20 09:57:07 UTC (rev 636) +++ trunk/src/pyphant/pyphant/core/DataContainer.py 2009-04-20 19:55:50 UTC (rev 637) @@ -68,7 +68,7 @@ __author__ = "$Author$" __version__ = "$Revision$" -import scipy, copy, md5, threading, numpy, StringIO +import scipy, copy, hashlib, threading, numpy, StringIO import os, platform, datetime, socket, urlparse from pyphant.quantities.PhysicalQuantities import (isPhysicalQuantity, PhysicalQuantity,_prefixes) @@ -220,7 +220,7 @@ label=property(_getLabel) def generateHash(self): - m = md5.new() + m = hashlib.md5() m.update(u''.join([c.hash for c in self.columns])) m.update(str(self.attributes)) m.update(self.longname) Modified: trunk/src/pyphant/pyphant/core/FieldContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/FieldContainer.py 2009-04-20 09:57:07 UTC (rev 636) +++ trunk/src/pyphant/pyphant/core/FieldContainer.py 2009-04-20 19:55:50 UTC (rev 637) @@ -57,7 +57,7 @@ __author__ = "$Author$" __version__ = "$Revision$" -import scipy, copy, md5, threading, numpy, StringIO +import scipy, copy, hashlib, threading, numpy, StringIO import os, platform, datetime, socket, 
urlparse from pyphant.quantities.PhysicalQuantities import (isPhysicalQuantity, PhysicalQuantity,_prefixes) from DataContainer import DataContainer, enc, _logger @@ -71,7 +71,7 @@ map(lambda r: PREFIXES.remove(r),[('d', 1.e-1),('c', 1.e-2)]) class IndexMarker(object): - hash=md5.new().hexdigest() + hash = hashlib.md5().hexdigest() shortname=u"i" longname=u"index" def seal(self, id=None): @@ -270,7 +270,7 @@ return res def generateHash(self): - m = md5.new() + m = hashlib.md5() m.update(str(self.data.tolist())) m.update(str(self.unit)) if self.error!=None: Modified: trunk/src/workers/tools/setup.py =================================================================== --- trunk/src/workers/tools/setup.py 2009-04-20 09:57:07 UTC (rev 636) +++ trunk/src/workers/tools/setup.py 2009-04-20 19:55:50 UTC (rev 637) @@ -25,5 +25,5 @@ entry_points = """ [pyphant.workers] myeentry = tools - """) - #test_suite='OSC.tests') + """, + test_suite='tools.tests') Added: trunk/src/workers/tools/tools/tests/TestEmd5Source.py =================================================================== --- trunk/src/workers/tools/tools/tests/TestEmd5Source.py (rev 0) +++ trunk/src/workers/tools/tools/tests/TestEmd5Source.py 2009-04-20 19:55:50 UTC (rev 637) @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2008, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. 
+# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +u"""Provides unittest classes TestMRA and TestMRADiscontinuousDiscretisation. 
+""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + + +import sys,copy +import unittest + +import pkg_resources +pkg_resources.require("pyphant") +pkg_resources.require("pyphant.tools") + +import os.path + +import numpy, scipy, scipy.optimize +import pyphant.quantities.PhysicalQuantities as pq +from pyphant.core import DataContainer as DC + + +class TestEmd5Source(unittest.TestCase): + """Sets up a random FieldContainer and ragisters it with the + knowledge manager.""" + def setUp(self): + self.V = DC.FieldContainer(numpy.random.randn(10,10)) + self.V.seal() + from pyphant.core import KnowledgeManager + KnowledgeManager.KnowledgeManager.getInstance().registerDataContainer(self.V) + + def testEmd5Source(self): + """Retrieves the previously registered FieldContainer via the + Emd5Source and checks for equality.""" + #Predict result + from tools import Emd5Src + s = Emd5Src.Emd5Src() + s.paramDc.value = self.V.id + result = s.plugLoad.getResult() + self.assertEqual(result, self.V) + +if __name__ == '__main__': + unittest.main() Added: trunk/src/workers/tools/tools/tests/__init__.py =================================================================== --- trunk/src/workers/tools/tools/tests/__init__.py (rev 0) +++ trunk/src/workers/tools/tools/tests/__init__.py 2009-04-20 19:55:50 UTC (rev 637) @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2007, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. 
+# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +u""" +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-05-10 19:25:40
|
Revision: 643 http://pyphant.svn.sourceforge.net/pyphant/?rev=643&view=rev Author: zklaus Date: 2009-05-10 19:25:28 +0000 (Sun, 10 May 2009) Log Message: ----------- Merge branch 'master' into svn-trunk * master: Fix: Adapt TestEmd5Source to new Emd5Source. Enh: Better handling of initial h5 loading. Modified Paths: -------------- trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py trunk/src/workers/tools/tools/tests/TestEmd5Source.py Modified: trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py =================================================================== --- trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py 2009-05-10 19:11:06 UTC (rev 642) +++ trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py 2009-05-10 19:25:28 UTC (rev 643) @@ -178,18 +178,18 @@ dlg.Destroy() import PyphantCanvas - try: - if self._wxPyphantApp.pathToRecipe[-3:] == '.h5': + if self._wxPyphantApp.pathToRecipe[-3:] == '.h5': + if os.path.exists(self._wxPyphantApp.pathToRecipe): recipe = pyphant.core.PyTablesPersister.loadRecipeFromHDF5File(self._wxPyphantApp.pathToRecipe) from pyphant.core import KnowledgeManager KnowledgeManager.KnowledgeManager.getInstance().registerURL( "file:///"+os.path.realpath(self._wxPyphantApp.pathToRecipe) ) + self._remainingSpace=PyphantCanvas.PyphantCanvas(self, recipe) else: - raise IOError("Unknown file format in file \""+self._wxPyphantApp.pathToRecipe+"\"") - self._remainingSpace=PyphantCanvas.PyphantCanvas(self, recipe) - except IOError, error: - self._remainingSpace=PyphantCanvas.PyphantCanvas(self) + self._remainingSpace=PyphantCanvas.PyphantCanvas(self) + else: + raise IOError("Unknown file format in file \""+self._wxPyphantApp.pathToRecipe+"\"") self.recipeState='clean' self._remainingSpace.diagram.recipe.registerListener(self.recipeChanged) Modified: trunk/src/workers/tools/tools/tests/TestEmd5Source.py =================================================================== --- trunk/src/workers/tools/tools/tests/TestEmd5Source.py 
2009-05-10 19:11:06 UTC (rev 642) +++ trunk/src/workers/tools/tools/tests/TestEmd5Source.py 2009-05-10 19:25:28 UTC (rev 643) @@ -67,7 +67,8 @@ #Predict result from tools import Emd5Src s = Emd5Src.Emd5Src() - s.paramDc.value = self.V.id + s.paramEmd5.value = self.V.id + s.paramSelectby.value = u'emd5' result = s.plugLoad.getResult() self.assertEqual(result, self.V) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-05-14 13:01:43
|
Revision: 646 http://pyphant.svn.sourceforge.net/pyphant/?rev=646&view=rev Author: zklaus Date: 2009-05-14 12:34:12 +0000 (Thu, 14 May 2009) Log Message: ----------- Merge branch 'master' into svn-trunk * master: Fix: Adapted Histogram worker to changed numpy semantics. Fix: Added masking of mask to SampleContainer.filter Modified Paths: -------------- trunk/src/pyphant/pyphant/core/DataContainer.py trunk/src/workers/Statistics/Statistics/Histogram.py Modified: trunk/src/pyphant/pyphant/core/DataContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/DataContainer.py 2009-05-11 22:38:35 UTC (rev 645) +++ trunk/src/pyphant/pyphant/core/DataContainer.py 2009-05-14 12:34:12 UTC (rev 646) @@ -838,21 +838,32 @@ if d.data != None: md.data = d.data[numpymask] + #mask mask of dimensions + if d.mask != None: + md.mask = d.mask[numpymask] + mdims.append(md) #mask errors: cerr = None - if c.error != None: cerr = c.error[numpymask] + if c.error != None: + cerr = c.error[numpymask] + #mask errors: + cmask = None + if c.mask != None: + cmask = c.mask[numpymask] + #mask data: cdata = None - if c.data != None: cdata = c.data[numpymask] + if c.data != None: + cdata = c.data[numpymask] maskedcolumns.append( FieldContainer(cdata, copy.deepcopy(c.unit), cerr, - copy.deepcopy(c.mask), + cmask, mdims, longname=c.longname, shortname=c.shortname, Modified: trunk/src/workers/Statistics/Statistics/Histogram.py =================================================================== --- trunk/src/workers/Statistics/Statistics/Histogram.py 2009-05-11 22:38:35 UTC (rev 645) +++ trunk/src/workers/Statistics/Statistics/Histogram.py 2009-05-14 12:34:12 UTC (rev 646) @@ -59,7 +59,8 @@ bins=self.paramBins.value histo= numpy.histogram(vector.data, bins, range=(numpy.floor(vector.data.min()), numpy.ceil(vector.data.max()))) - xdim = DataContainer.FieldContainer(histo[1], vector.unit, + binCenters = histo[1][:-1]+((histo[1][1:]-histo[1][:-1])/2.0) 
+ xdim = DataContainer.FieldContainer(binCenters, vector.unit, longname=vector.longname, shortname=vector.shortname) result = DataContainer.FieldContainer(histo[0], dimensions=[xdim], This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-06-24 14:53:25
|
Revision: 648 http://pyphant.svn.sourceforge.net/pyphant/?rev=648&view=rev Author: zklaus Date: 2009-06-24 13:56:20 +0000 (Wed, 24 Jun 2009) Log Message: ----------- Merge branch 'master' into svn-trunk * master: Fix: Started repairing the histogram worker. Cosm: Code improvements in the spirit of PEP8. Cosm: Code improvements in the spirit of PEP8. Modified Paths: -------------- trunk/src/pyphant/pyphant/core/KnowledgeManager.py trunk/src/workers/Statistics/Statistics/Histogram.py trunk/src/workers/Statistics/Statistics/tests/TestHistogram.py Modified: trunk/src/pyphant/pyphant/core/KnowledgeManager.py =================================================================== --- trunk/src/pyphant/pyphant/core/KnowledgeManager.py 2009-06-24 12:43:25 UTC (rev 647) +++ trunk/src/pyphant/pyphant/core/KnowledgeManager.py 2009-06-24 13:56:20 UTC (rev 648) @@ -212,8 +212,8 @@ registerContents -- whether to register contents of the file as well. """ if self.H5FileHandlers.has_key(filename): - raise KnowledgeManagerException("'%s' has already been registered."\ - % (filename, )) + raise KnowledgeManagerException("'%s' has already been registered." + % filename) self.H5FileHandlers[filename] = H5FileHandler(filename, mode) if registerContents: self.refreshH5(filename) @@ -312,8 +312,8 @@ """ logger = self._logger if self.isServerRunning(): - logger.warn("Server is running at host %s, port %d already. \ -Stopping server...", self._http_host, self._http_port) + logger.warn("Server is running at host %s, port %d already. " + "Stopping server...", self._http_host, self._http_port) self.stopServer() self._http_host = host self._http_port = port @@ -324,8 +324,8 @@ self._server.start() self._http_server_thread = _HTTPServerThread() self._http_server_thread.start() - self._logger.debug("Started HTTP server. Host: %s, port: %d, \ -temp dir: %s", host, port, self._http_dir) + self._logger.debug("Started HTTP server. 
Host: %s, port: %d, " + "temp dir: %s", host, port, self._http_dir) self.web_interface.disabled = not provide_web_frontend def stopServer(self): @@ -340,8 +340,8 @@ try: urllib.urlopen(self._getServerURL()) except: - logger.warn("Fake HTTP request failed when stopping HTTP \ -server.") + logger.warn("Fake HTTP request failed when stopping HTTP " + "server.") logger.info("Waiting for HTTP server thread to die...") self._http_server_thread.join(WAITING_SECONDS_HTTP_SERVER_STOP) if self._http_server_thread.isAlive(): @@ -360,8 +360,8 @@ self._http_dir) self._http_dir = None else: - self._logger.warn("HTTP server should be stopped but isn't \ -running.") + self._logger.warn("HTTP server should be stopped but isn't " + "running.") def isServerRunning(self): """ @@ -405,8 +405,8 @@ logger.debug("KM ID read from HTTP answer: %s", km_id) except Exception, excep: raise KnowledgeManagerException( - "Couldn't get ID for knowledge manager under URL %s."\ - % (km_url, ), excep) + "Couldn't get ID for knowledge manager under URL %s." + % (km_url, ), excep) self._remoteKMs[km_id] = km_url def registerURL(self, url): @@ -419,8 +419,8 @@ url -- URL of the HDF5 or FMF file """ parsed = urlparse(url) - filename = KM_PATH + 'registered/' + parsed[1] + '/'\ - + os.path.basename(parsed[2]) + filename = KM_PATH + 'registered/' + parsed[1] + '/' + filename += os.path.basename(parsed[2]) directory = os.path.dirname(filename) filename = getPyphantPath(directory) + os.path.basename(filename) if os.path.exists(filename): @@ -492,10 +492,9 @@ logger = self._logger # add this KM to query query_dict['lastkmidindex'] += 1 - query_dict['kmid%d' % (query_dict['lastkmidindex'], )] =\ - self.getServerId() + query_dict['kmid%d' % query_dict['lastkmidindex']] = self.getServerId() # ask every remote KnowledgeManager for id - logger.debug("Requesting knowledge managers for DC id '%s'..."\ + logger.debug("Requesting knowledge managers for DC id '%s'..." 
% (query_dict['dcid'], )) dc_url = None for km_id, km_url in self._remoteKMs.iteritems(): @@ -610,8 +609,11 @@ try: dc = dcinfo['filehandler'].loadDataContainer(dc_id) except Exception, excep: - raise KnowledgeManagerException("DC ID '%s' known, but \ -cannot be read from file '%s'." % (dc_id, localfilename), excep) + raise KnowledgeManagerException("DC ID '%s' known, but " + "cannot be read from file " + "'%s'." % (dc_id, + localfilename), + excep) if use_cache and dcinfo['hitcount'] >= CACHE_THRESHHOLD: docache = False if len(self._cache) >= CACHE_SIZE: @@ -636,8 +638,8 @@ dc_url = self._getDCURLFromRemoteKMs({'dcid':dc_id, 'lastkmidindex':-1}) if dc_url == None: - raise KnowledgeManagerException("DC ID '%s' is unknown."\ - %(dc_id,)) + raise KnowledgeManagerException("DC ID '%s' is unknown." + % (dc_id,)) filename = getFilenameFromDcId(dc_id) urllib.urlretrieve(dc_url, filename) self.registerH5(filename) Modified: trunk/src/workers/Statistics/Statistics/Histogram.py =================================================================== --- trunk/src/workers/Statistics/Statistics/Histogram.py 2009-06-24 12:43:25 UTC (rev 647) +++ trunk/src/workers/Statistics/Statistics/Histogram.py 2009-06-24 13:56:20 UTC (rev 648) @@ -56,15 +56,23 @@ @Worker.plug(Connectors.TYPE_IMAGE) def calculateHistogram(self, vector, subscriber=0): - bins=self.paramBins.value - histo= numpy.histogram(vector.data, bins, range=(numpy.floor(vector.data.min()), - numpy.ceil(vector.data.max()))) - binCenters = histo[1][:-1]+((histo[1][1:]-histo[1][:-1])/2.0) + bins = self.paramBins.value + try: + histo = numpy.histogram(vector.data.flat, bins, new=True, + range=(numpy.floor(vector.data.min()), + numpy.ceil(vector.data.max()))) + binCenters = histo[1][:-1]+(numpy.diff(histo[1])/2.0) + except TypeError: + histo = numpy.histogram(vector.data.flat, bins, + range=(numpy.floor(vector.data.min()), + numpy.ceil(vector.data.max()))) + binCenters = histo[1]+((histo[1][1]-histo[1][0])/2.0) xdim = 
DataContainer.FieldContainer(binCenters, vector.unit, longname=vector.longname, shortname=vector.shortname) result = DataContainer.FieldContainer(histo[0], dimensions=[xdim], - longname=u"Histogram of %s"%vector.longname, + longname=u"Histogram of %s" + % vector.longname, shortname=u"h") result.seal() return result Modified: trunk/src/workers/Statistics/Statistics/tests/TestHistogram.py =================================================================== --- trunk/src/workers/Statistics/Statistics/tests/TestHistogram.py 2009-06-24 12:43:25 UTC (rev 647) +++ trunk/src/workers/Statistics/Statistics/tests/TestHistogram.py 2009-06-24 13:56:20 UTC (rev 648) @@ -63,14 +63,16 @@ self.dim = dim self.worker = S.Histogram(None) self.accuracyLevel = -3 - self.uniform = DataContainer.FieldContainer( uniformSample, - unit = '1 V', - longname='Uniform noise', - shortname='g') - self.norm = DataContainer.FieldContainer( normalSample, - unit = '1 V', - longname='Gaussian white noise', - shortname='w') + self.uniform = DataContainer.FieldContainer( + uniformSample, + unit = '1 V', + longname='Uniform noise', + shortname='g' ) + self.norm = DataContainer.FieldContainer( + normalSample, + unit = '1 V', + longname='Gaussian white noise', + shortname='w' ) self.uniform.seal() def testUniform(self): @@ -85,7 +87,9 @@ result.data, decimal=self.accuracyLevel) self.failUnless(result.dimensions[0].unit == self.uniform.unit, - "Unit of result's dimension [%s] has to match the unit of the input data [%s]." % (result.dimensions[0].unit, self.uniform.unit)) + "Unit of result's dimension [%s] has to match " + "the unit of the input data [%s]." + % (result.dimensions[0].unit, self.uniform.unit)) def testNormal(self): """Tests the correct evaluation of a Gaussian white noise sample.""" @@ -107,7 +111,9 @@ result.data, decimal=self.accuracyLevel) self.failUnless(result.dimensions[0].unit == self.norm.unit, - "Unit of result's dimension [%s] has to match the unit of the input data [%s]." 
% (result.dimensions[0].unit, self.norm.unit)) + "Unit of result's dimension [%s] has to match " + "the unit of the input data [%s]." + % (result.dimensions[0].unit, self.norm.unit)) if __name__ == '__main__': unittest.main() This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-07-20 23:07:10
|
Revision: 651 http://pyphant.svn.sourceforge.net/pyphant/?rev=651&view=rev Author: zklaus Date: 2009-07-20 23:06:59 +0000 (Mon, 20 Jul 2009) Log Message: ----------- Merge branch 'master' into svn-trunk * master: Enh: Support manual color scaling in ImageVisualizer. Modified Paths: -------------- trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py trunk/src/workers/OSC/OSC/OscAbsorption.py Modified: trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py =================================================================== --- trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py 2009-07-20 21:08:00 UTC (rev 650) +++ trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py 2009-07-20 23:06:59 UTC (rev 651) @@ -111,8 +111,20 @@ ymax=scipy.amax(y) #Support for images with non uniform axes adapted from python-matplotlib-doc/examples/pcolor_nonuniform.py ax = self.figure.add_subplot(111) + vmin = self.fieldContainer.attributes.get('vmin', None) + vmax = self.fieldContainer.attributes.get('vmax', None) + if vmin is not None: + vmin /= self.fieldContainer.unit + if vmax is not None: + vmax /= self.fieldContainer.unit if MPL_LT_0_98_1 or self.fieldContainer.isLinearlyDiscretised(): - pylab.imshow(self.fieldContainer.maskedData, extent=(xmin, xmax, ymin, ymax), origin='lower', interpolation='nearest',aspect='auto') + pylab.imshow(self.fieldContainer.maskedData, + aspect='auto', + interpolation='nearest', + vmin=vmin, + vmax=vmax, + origin='lower', + extent=(xmin, xmax, ymin, ymax)) pylab.colorbar(format=F(self.fieldContainer), ax=ax) else: im = NonUniformImage(ax, extent=(xmin,xmax,ymin,ymax)) @@ -120,6 +132,10 @@ ax.images.append(im) ax.set_xlim(xmin,xmax) ax.set_ylim(ymin,ymax) + if vmin is not None or vmax is not None: + im.set_clim(vmin, vmax) + else: + im.autoscale_None() pylab.colorbar(im,format=F(self.fieldContainer), ax=ax) pylab.xlabel(self.fieldContainer.dimensions[-1].shortlabel) pylab.ylabel(self.fieldContainer.dimensions[-2].shortlabel) Modified: 
trunk/src/workers/OSC/OSC/OscAbsorption.py =================================================================== --- trunk/src/workers/OSC/OSC/OscAbsorption.py 2009-07-20 21:08:00 UTC (rev 650) +++ trunk/src/workers/OSC/OSC/OscAbsorption.py 2009-07-20 23:06:59 UTC (rev 651) @@ -162,7 +162,10 @@ ("yAxis", u"y-Axis", [u"vertical_table_position"], None), ("field", u"Field", [u"thickness"], None), ("extentX", u"Extension of x-axis [%%]", 10, None), - ("extentY", u"Extension of y-axis [%%]", 10, None)] + ("extentY", u"Extension of y-axis [%%]", 10, None), + ("overrideV", u"Override value limits", False, None), + ("vmin", u"Minimal value", "0 nm", None), + ("vmax", u"Maximal value", "100 nm", None)] def inithook(self): self._logger = logging.getLogger("pyphant") @@ -180,23 +183,31 @@ yOff, yStep, yInd = grid2Index(yf, self.paramExtentY.value) xMax = xInd.maxV yMax = yInd.maxV - xDim = DataContainer.FieldContainer( numpy.linspace(xInd.minV,xInd.maxV,xInd.stepCount)-0.5*xStep, xCon.unit, - longname = xCon.longname, shortname = xCon.shortname ) - yDim = DataContainer.FieldContainer( numpy.linspace(yInd.minV,yInd.maxV,yInd.stepCount)-0.5*yStep, yCon.unit, - longname = yCon.longname, shortname = yCon.shortname ) - img = numpy.ones((yInd.stepCount, xInd.stepCount), dtype='float')*numpy.NaN + xDim = DataContainer.FieldContainer( + numpy.linspace(xInd.minV,xInd.maxV,xInd.stepCount)-0.5*xStep, + xCon.unit, + longname = xCon.longname, shortname = xCon.shortname ) + yDim = DataContainer.FieldContainer( + numpy.linspace(yInd.minV,yInd.maxV,yInd.stepCount)-0.5*yStep, + yCon.unit, + longname = yCon.longname, shortname = yCon.shortname ) + img = numpy.ones((yInd.stepCount, xInd.stepCount), + dtype='float')*numpy.NaN mask = numpy.ones((yInd.stepCount, xInd.stepCount), dtype='bool') for i in xrange(xf.size): xi = xInd[xf[i]] yi = yInd[yf[i]] if not mask[yi, xi]: - self._logger.warning("Duplicate data for pixel (%.4g,%.4g). Using first found value. 
Is your data corrupt?"%(xf[i],yf[i])) + self._logger.warning("Duplicate data for pixel (%.4g,%.4g). " + "Using first found value. " + "Is your data corrupt?"%(xf[i],yf[i])) else: img[yi, xi] = h[i] if h[i]>0: mask[yi, xi] = False - result = DataContainer.FieldContainer( img, fCon.unit, mask=mask, dimensions=[yDim, xDim], - longname=u'Map of %s'%fCon.longname, shortname=fCon.shortname) + result = DataContainer.FieldContainer( + img, fCon.unit, mask=mask, dimensions=[yDim, xDim], + longname=u'Map of %s'%fCon.longname, shortname=fCon.shortname) return result @Worker.plug(Connectors.TYPE_IMAGE) @@ -211,5 +222,15 @@ con.data = con.data.astype('float') xf, yf, h = tuple([ con.data for con in cons ]) result = self.calcNormal(osc, xCon, yCon, fCon, xf, yf, h) + if self.paramOverrideV.value: + vs = self.paramVmin.value, self.paramVmax.value + from pyphant.quantities.PhysicalQuantities import ( + isPhysicalQuantity, PhysicalQuantity) + try: + vs = [PhysicalQuantity(v) for v in vs] + except SyntaxError: + vs = [float(v) for v in vs] + result.attributes['vmin'] = vs[0] + result.attributes['vmax'] = vs[1] result.seal() return result This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-07-21 13:41:35
|
Revision: 653 http://pyphant.svn.sourceforge.net/pyphant/?rev=653&view=rev Author: zklaus Date: 2009-07-21 13:41:23 +0000 (Tue, 21 Jul 2009) Log Message: ----------- Merge branch 'master' into svn-trunk * master: (37 commits) Fix: Separated gradient filter from NDImageWorker Fix: Moved GSInverter to NDImage worker Fix: Fixed unit of EnhanceContrast worker Fix: Removed unused parameter from FindLocalExtrema worker Cosm: Renamed variables in NDImageWorker Fix: Removed Gradient worker Fix: Removed broken filters from NDImage worker Cosm: Changed longnames of output FCs of several workers Fix: Removed registration of recipes in wxPyphant Enh: Added MeasureFocus worker Enh: Changed method for finding extrema Cosm: Removed commentary in NDImage worker Enh: Added parameter 'tolerance' to FindLocalExtrema Worker Enh: Added workers FindLocalExtrema and Watershed (both preliminary) Enh: Added new filters to NDImage worker Enh: Added new filters to NDImage worker Fix: EnhanceContrast worker returns proper types now Fix: Fixed bug in FitBackground worker Enh: Added some filters to NDImage worker Fix: EnhanceContrast worker uses linear stretching now ... 
Modified Paths: -------------- trunk/src/pyphant/pyphant/core/H5FileHandler.py trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py Added Paths: ----------- trunk/src/workers/ImageProcessing/ImageProcessing/EnhanceContrast.py trunk/src/workers/ImageProcessing/ImageProcessing/FindLocalExtrema.py trunk/src/workers/ImageProcessing/ImageProcessing/FitBackground.py trunk/src/workers/ImageProcessing/ImageProcessing/Gradient.py trunk/src/workers/ImageProcessing/ImageProcessing/MeasureFocus.py trunk/src/workers/ImageProcessing/ImageProcessing/NDImageWorker.py trunk/src/workers/ImageProcessing/ImageProcessing/Watershed.py Modified: trunk/src/pyphant/pyphant/core/H5FileHandler.py =================================================================== --- trunk/src/pyphant/pyphant/core/H5FileHandler.py 2009-07-21 11:42:02 UTC (rev 652) +++ trunk/src/pyphant/pyphant/core/H5FileHandler.py 2009-07-21 13:41:23 UTC (rev 653) @@ -37,7 +37,7 @@ __version__ = "$Revision$" # $Source$: import tables -from pyphant.core import DataContainer +from pyphant.core import (DataContainer, FieldContainer) from tables import StringCol from pyphant.quantities.PhysicalQuantities import PhysicalQuantity import scipy @@ -143,7 +143,7 @@ + DataContainer.parseId(row['id'])[0])) for row in dimTable.iterrows()] except tables.NoSuchNodeError: - dimensions = DataContainer.INDEX + dimensions = FieldContainer.INDEX result = DataContainer.FieldContainer(data, unit, error, mask, dimensions, longname, shortname, attributes) @@ -343,7 +343,7 @@ (u"Mask of "+result.longname).encode("utf-8")) self.handle.setNodeAttr(resultGroup, "unit", repr(result.unit).encode("utf-8")) - if result.dimensions != DataContainer.INDEX: + if result.dimensions != FieldContainer.INDEX: idLen = max([len(dim.id.encode("utf-8")) for dim in result.dimensions]) dimTable = self.handle.createTable(resultGroup, "dimensions", Modified: 
trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py =================================================================== --- trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py 2009-07-21 11:42:02 UTC (rev 652) +++ trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py 2009-07-21 13:41:23 UTC (rev 653) @@ -181,10 +181,10 @@ if self._wxPyphantApp.pathToRecipe[-3:] == '.h5': if os.path.exists(self._wxPyphantApp.pathToRecipe): recipe = pyphant.core.PyTablesPersister.loadRecipeFromHDF5File(self._wxPyphantApp.pathToRecipe) - from pyphant.core import KnowledgeManager - KnowledgeManager.KnowledgeManager.getInstance().registerURL( - "file:///"+os.path.realpath(self._wxPyphantApp.pathToRecipe) - ) + #from pyphant.core import KnowledgeManager + #KnowledgeManager.KnowledgeManager.getInstance().registerURL( + # "file:///"+os.path.realpath(self._wxPyphantApp.pathToRecipe) + # ) self._remainingSpace=PyphantCanvas.PyphantCanvas(self, recipe) else: self._remainingSpace=PyphantCanvas.PyphantCanvas(self) Copied: trunk/src/workers/ImageProcessing/ImageProcessing/EnhanceContrast.py (from rev 652, trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py) =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/EnhanceContrast.py (rev 0) +++ trunk/src/workers/ImageProcessing/ImageProcessing/EnhanceContrast.py 2009-07-21 13:41:23 UTC (rev 653) @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2007, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +u""" +The Thresholding Worker is a class of Pyphant's image Processing +Toolbox. The threshold can be edited in the worker's configuration. It +returns a binary image where pixels that comprise features are set to +0x00 whereas background pixels are set to 0xFF. 
+""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + +from pyphant.core import Worker, Connectors,\ + Param, DataContainer + +import ImageProcessing +from ImageProcessing.NDImageWorker import pile +import scipy, copy + +def normalizeHistogram(data): + histogram = scipy.ndimage.histogram(data.astype("f"), 0, 255, 256) + cumulatedHistogram = scipy.cumsum(histogram) + nch = cumulatedHistogram.astype("f")/len(data.flat) + inch = (nch*255).astype("i") + normalize = scipy.vectorize(lambda i: inch[i]) + return normalize(data) + #return data + +def normalizeLinear(data): + res = data - data.min() + res = (res * 255) / res.max() + return res + +class EnhanceContrast(Worker.Worker): + API = 2 + VERSION = 1 + REVISION = "$Revision$"[11:-1] + name = "EnhanceContrast" + _sockets = [("image", Connectors.TYPE_IMAGE)] + + @Worker.plug(Connectors.TYPE_IMAGE) + def enhance(self, image, subscriber=0): + newdata = pile(normalizeLinear, image.data) + longname = "Normalize" + result = DataContainer.FieldContainer( + newdata, + 1, + None, + copy.deepcopy(image.mask), + copy.deepcopy(image.dimensions), + longname, + image.shortname, + copy.deepcopy(image.attributes), + False) + result.seal() + #print newdata.shape + return result Added: trunk/src/workers/ImageProcessing/ImageProcessing/FindLocalExtrema.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/FindLocalExtrema.py (rev 0) +++ trunk/src/workers/ImageProcessing/ImageProcessing/FindLocalExtrema.py 2009-07-21 13:41:23 UTC (rev 653) @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2007, Rectorate of the University of Freiburg +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +u""" +TODO +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + +from pyphant.core import Worker, Connectors,\ + Param, DataContainer + +import ImageProcessing +from ImageProcessing.NDImageWorker import pile +import scipy, copy +from scipy import ndimage +from numpy import (alltrue, zeros) + +class FindLocalExtrema(Worker.Worker): + API = 2 + VERSION = 1 + REVISION = "$Revision$"[11:-1] + name = "FindLocalExtrema" + _sockets = [("image", Connectors.TYPE_IMAGE)] + _params = [("maxmin", "max/min", ["max", "min"], None), + ("excolor", "marker color (-1 for labeling)", 255, None), + ("tolerance", "tolerance in %", 20, None)] + + def compare(self, pvalue, nhood, viewextr): + if self.paramMaxmin.value == "max": + c = pvalue >= (viewextr * (100 - self.paramTolerance.value)) / 100 + else: + c = pvalue <= (viewextr * (100 + self.paramTolerance.value)) / 100 + return c + + def getPoints(self, data, sl): + if sl[0].stop - sl[0].start < 3 or sl[1].stop - sl[1].start < 3: + return [] + points = [] + if self.paramMaxmin.value == "max": + viewextr = data[sl].max() + else: + viewextr = data[sl].min() + for y in xrange(sl[0].start, sl[0].stop): + ydiff = 1 + if y == 0: + ydiff = 0 + for x in xrange(sl[1].start, sl[1].stop): + xdiff = 1 + if x == 0: + xdiff = 0 + nhood = data[y - ydiff:y + 2, + x - xdiff:x + 2] + pvalue = data[y, x] + if not alltrue(pvalue == nhood): + if self.compare(pvalue, nhood, viewextr): + if self.paramExcolor.value == -1: + color = self.nextlabel + else: + color = self.paramExcolor.value + points.append(((y, x), color)) + self.nextlabel += 1 + return points + + def findExtrema(self, data): + labeled = ndimage.label(data)[0] + self.nextlabel = 1 + slices = ndimage.find_objects(labeled) + res = zeros(data.shape, int) + for sl in slices: + points = self.getPoints(data, sl) + for p, c in points: + res[p] = c + return res + + @Worker.plug(Connectors.TYPE_IMAGE) + def find(self, image, subscriber=0): + newdata = 
pile(self.findExtrema, image.data) + longname = "FindLocalExtrema" + result = DataContainer.FieldContainer( + newdata, + copy.deepcopy(image.unit), + copy.deepcopy(image.error), + copy.deepcopy(image.mask), + copy.deepcopy(image.dimensions), + longname, + image.shortname, + copy.deepcopy(image.attributes), + False) + result.seal() + #print newdata.shape + return result Added: trunk/src/workers/ImageProcessing/ImageProcessing/FitBackground.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/FitBackground.py (rev 0) +++ trunk/src/workers/ImageProcessing/ImageProcessing/FitBackground.py 2009-07-21 13:41:23 UTC (rev 653) @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2007, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +u""" +TODO +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + +from pyphant.core import Worker, Connectors,\ + Param, DataContainer +import ImageProcessing +import numpy, copy +from scipy import (ndimage, interpolate) + +class FitBackground(Worker.Worker): + API = 2 + VERSION = 1 + REVISION = "$Revision$"[11:-1] + name = "FitBackground" + _sockets = [("image", Connectors.TYPE_IMAGE)] + _params = [("poldegree", "Polynomial degree (1 to 5)", 3, None), + ("swidth", "sample width", 150, None), + ("sheight", "sample height", 150, None), + ("threshold", "Background threshold", 255, None), + ("mediansize", "Median kernel size", 3, None), + ("medianruns", "Median runs", 3, None), + ("darksize", "Erosion kernel size", 3, None), + ("darkruns", "Erosion runs", 4, None), + ("brightsize", "Inverted erosion size", 6, None), + ("brightruns", "Inverted erosion runs", 10, None), + ("dopreview", "Preview fit input", False, None)] + + def fit(self, data, poldegree, swidth, sheight, threshold): + if int(threshold) == -1: + threshold = (int(data.mean()) * 10) / 7 + dims = data.shape + xList = [] + yList = [] + zList = [] + for y in xrange(0, dims[0] - 1, sheight): + for x in xrange(0, dims[1] - 1, swidth): + view = data[y:y + sheight, x:x + swidth] + flatIndex = numpy.argmax(view) + yIdx, xIdx = numpy.unravel_index(flatIndex, view.shape) + zValue = view[yIdx, xIdx] + if zValue <= threshold: + xList.append(x + xIdx) + 
yList.append(y + yIdx) + zList.append(zValue) + if len(xList) < (poldegree + 1) * (poldegree + 1): + raise ValueError("Not enough reference points.") + tck = interpolate.bisplrep(yList, xList, zList, + kx=poldegree, ky=poldegree, + xb=0, yb=0, + xe=int(dims[0]), ye=int(dims[1])) + clipmin, clipmax = data.min(), threshold + return interpolate.bisplev(range(dims[0]), range(dims[1]), + tck).clip(clipmin, clipmax) + + + @Worker.plug(Connectors.TYPE_IMAGE) + def fit_background(self, image, subscriber=0): + poldegree = self.paramPoldegree.value + swidth = self.paramSwidth.value + sheight = self.paramSheight.value + threshold = self.paramThreshold.value + mediansize = self.paramMediansize.value + medianruns = self.paramMedianruns.value + darksize = self.paramDarksize.value + darkruns = self.paramDarkruns.value + brightsize = self.paramBrightsize.value + brightruns = self.paramBrightruns.value + dopreview = self.paramDopreview.value + assert image.data.ndim in [2, 3] + if image.data.ndim == 2: + pile = [image.data] + else: + pile = image.data + #Median + for run in xrange(medianruns): + pile = [ndimage.median_filter(data, + size=mediansize) for data in pile] + #Suspend dark spots: + for run in xrange(darkruns): + pile = [255 - ndimage.grey_erosion(255 - data, + size=darksize) for data in pile] + #Suspend features: + for run in xrange(brightruns): + pile = [ndimage.grey_erosion(data, + size=brightsize) for data in pile] + #Fit background: + if not dopreview: + pile = [self.fit(data, poldegree, swidth, sheight, + threshold) for data in pile] + if image.data.ndim == 2: + newdata = pile[0] + else: + newdata = numpy.array(pile) + longname = "FitBackground" + result = DataContainer.FieldContainer( + newdata, + copy.deepcopy(image.unit), + copy.deepcopy(image.error), + copy.deepcopy(image.mask), + copy.deepcopy(image.dimensions), + longname, + image.shortname, + copy.deepcopy(image.attributes), + False) + result.seal() + return result Copied: 
trunk/src/workers/ImageProcessing/ImageProcessing/Gradient.py (from rev 652, trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py) =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/Gradient.py (rev 0) +++ trunk/src/workers/ImageProcessing/ImageProcessing/Gradient.py 2009-07-21 13:41:23 UTC (rev 653) @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2007, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +u""" +TODO +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + +from pyphant.core import Worker, Connectors,\ + Param, DataContainer +import ImageProcessing +import numpy, copy +from ImageProcessing.NDImageWorker import pile + +def gradient(data): + res = numpy.sqrt(sum( + numpy.square(numpy.array(numpy.gradient(data))) + )) + return (res * 255.0).astype(int) / 361 + + +class Gradient(Worker.Worker): + API = 2 + VERSION = 1 + REVISION = "$Revision$"[11:-1] + name = "Gradient" + _sockets = [("image", Connectors.TYPE_IMAGE)] + + @Worker.plug(Connectors.TYPE_IMAGE) + def gradientWorker(self, image, subscriber=0): + #TODO: Check whether all dimensions have same unit + newdata = gradient(image.data) + longname = "Gradient" + result = DataContainer.FieldContainer( + newdata, + (361.0 / 255.0) * (image.unit / image.dimensions[0].unit), + None, + copy.deepcopy(image.mask), + copy.deepcopy(image.dimensions), + longname, + image.shortname, + copy.deepcopy(image.attributes), + False) + result.seal() + return result Added: trunk/src/workers/ImageProcessing/ImageProcessing/MeasureFocus.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/MeasureFocus.py (rev 0) +++ trunk/src/workers/ImageProcessing/ImageProcessing/MeasureFocus.py 2009-07-21 13:41:23 UTC (rev 653) @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2007, 
Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +u""" +TODO +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + +from pyphant.core import Worker, Connectors,\ + Param, DataContainer +import ImageProcessing +import numpy, copy +from scipy import ndimage +from ImageProcessing.NDImageWorker import pile + +class MeasureFocus(Worker.Worker): + API = 2 + VERSION = 1 + REVISION = "$Revision$"[11:-1] + name = "MeasureFocus" + _sockets = [("image", Connectors.TYPE_IMAGE), + ("labels", Connectors.TYPE_IMAGE)] + _params = [("grow", "grow slices by #n pixels:", 3, None)] + + def getFocus(self, data): + #return numpy.sum(numpy.sqrt(numpy.sum(numpy.square( + # numpy.array(numpy.gradient(data)))))) / data.size + return numpy.sum(data) / data.size + + def sliceAndMeasure(self, data): + grow = self.paramGrow.value + slices = ndimage.find_objects(self._labels) + res = numpy.zeros(data.shape) + label = 0 + for sl in slices: + label += 1 + if sl[0].stop - sl[0].start >= 3 and sl[1].stop - sl[1].start >= 3: + start = [sl[0].start - grow, sl[1].start - grow] + stop = [sl[0].stop + grow, sl[1].stop + grow] + if start[0] < 0: start[0] = 0 + if start[1] < 0: start[1] = 0 + bigsl = (slice(start[0], stop[0]), slice(start[1], stop[1])) + focus = self.getFocus(data[bigsl]) + res[sl] = numpy.where(self._labels[sl] == label, focus, res[sl]) + return res + + @Worker.plug(Connectors.TYPE_IMAGE) + def measure_focus(self, image, labels, subscriber=0): + self._labels = labels.data + newdata = pile(self.sliceAndMeasure, image.data) + longname = "MeasureFocus" + result = DataContainer.FieldContainer( + newdata, + copy.deepcopy(image.unit), + copy.deepcopy(image.error), + copy.deepcopy(image.mask), + copy.deepcopy(image.dimensions), + longname, + image.shortname, + copy.deepcopy(image.attributes), + False) + result.seal() + return result Added: trunk/src/workers/ImageProcessing/ImageProcessing/NDImageWorker.py =================================================================== --- 
trunk/src/workers/ImageProcessing/ImageProcessing/NDImageWorker.py (rev 0) +++ trunk/src/workers/ImageProcessing/ImageProcessing/NDImageWorker.py 2009-07-21 13:41:23 UTC (rev 653) @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2007, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +u""" +TODO +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + +from pyphant.core import Worker, Connectors,\ + Param, DataContainer +import ImageProcessing +import numpy, copy +from scipy import ndimage + +def pile(func, imagedata, runs=1, dopile=True): + assert imagedata.ndim in [2, 3] + assert runs >= 0 + if runs == 0: + return imagedata + if imagedata.ndim == 2 or not dopile: + pile = [imagedata] + else: + pile = imagedata + for run in xrange(runs): + pile = [func(data) for data in pile] + if imagedata.ndim == 2 or not dopile: + newdata = pile[0] + else: + newdata = numpy.array(pile) + return newdata + + +class NDImage(Worker.Worker): + API = 2 + VERSION = 1 + REVISION = "$Revision$"[11:-1] + name = "ndimage" + _sockets = [("image", Connectors.TYPE_IMAGE)] + _filters = {"binary_closing":("iterations", ), + "binary_opening":("iterations", ), + "binary_fill_holes":(), + "binary_erosion":("iterations", ), + "binary_dilation":("iterations", ), + "maximum_filter":("size", "mode", "cval"), + "median_filter":("size", "mode", "cval"), + "grey_closing":("size", "mode", "cval"), + "grey_erosion":("size", "mode", "cval"), + "grey_opening":("size", "mode", "cval"), + "grey_invert":(None, ), + "cut_histogram":(None, "tolerance"), + "label":(None, "connectivity")} + _ndparams = {"iterations":1, + "size":5, + "mode":["reflect", + "nearest", + "wrap", + "constant"], + "cval":0, + "tolerance":1000, + "connectivity":2} + _params = [("pile", "Treat 3d images as pile of 2d images", True, None), + ("ndfilter", "Filter", _filters.keys(), None)] + _params += [(pn, pn, dflt, None) for pn, dflt in _ndparams.iteritems()] + + def grey_invert(self, data): + return 255 - data + + def label(self, data, connectivity): + structure = ndimage.morphology.generate_binary_structure(data.ndim, + connectivity) + return ndimage.label(data, structure=structure)[0] + + def cut_histogram(self, data, tolerance): + hist = ndimage.histogram(data, 0, 256, 256) + 
csum = numpy.cumsum(hist) + cut = csum[255] / tolerance + for i in xrange(len(csum)): + if csum[i] > cut: + newmin = i + break + meanvalue = data.mean() + return numpy.where(data < newmin, meanvalue, data) + + def applyfilter(self, data): + if None in self._filters[self.paramNdfilter.value]: + call = getattr(self, self.paramNdfilter.value) + else: + call = getattr(ndimage, self.paramNdfilter.value) + args = {} + for par in self._filters[self.paramNdfilter.value]: + if par != None: + args[par] = self.getParam(par).value + print args + return call(data, **args) + + @Worker.plug(Connectors.TYPE_IMAGE) + def ndimage(self, image, subscriber=0): + if "iterations" in self._filters[self.paramNdfilter.value]: + runs = 1 + else: + runs = self.paramIterations.value + newdata = pile(self.applyfilter, image.data, runs, self.paramPile.value) + longname = "%s" % (self.paramNdfilter.value, ) + result = DataContainer.FieldContainer( + newdata, + copy.deepcopy(image.unit), + None, + copy.deepcopy(image.mask), + copy.deepcopy(image.dimensions), + longname, + image.shortname, + copy.deepcopy(image.attributes), + False) + result.seal() + return result Added: trunk/src/workers/ImageProcessing/ImageProcessing/Watershed.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/Watershed.py (rev 0) +++ trunk/src/workers/ImageProcessing/ImageProcessing/Watershed.py 2009-07-21 13:41:23 UTC (rev 653) @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2007, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +u""" +TODO +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + +from pyphant.core import Worker, Connectors,\ + Param, DataContainer + +import ImageProcessing +from ImageProcessing.NDImageWorker import pile +import copy +import heapq, scipy, scipy.ndimage, pylab, threading + +class Watershed(Worker.Worker): + API = 2 + VERSION = 1 + REVISION = "$Revision$"[11:-1] + name = "Watershed" + _sockets = [("image", Connectors.TYPE_IMAGE), + ("markers", Connectors.TYPE_IMAGE)] + #_params = [("maxmin", "max/min", ["max", "min"], None), + # ("domn", "dominates neighbours by", 0, None)] + + def watershed(self, a): + m = self._markers + d = scipy.ndimage.distance_transform_edt(a) + q = [] + w = m.copy() + for y, x in scipy.argwhere(m!=0): + heapq.heappush(q, (-d[y-1, x-1], (y-1, x-1))) + heapq.heappush(q, (-d[y-1, x], (y-1, x))) + heapq.heappush(q, (-d[y-1, x+1], (y-1, x+1))) + heapq.heappush(q, (-d[y, x-1], (y, x-1))) + heapq.heappush(q, (-d[y, x+1], (y, x+1))) + heapq.heappush(q, (-d[y+1, x-1], (y+1, x-1))) + heapq.heappush(q, (-d[y+1, x], (y+1, x))) + heapq.heappush(q, (-d[y+1, x+1], (y+1, x+1))) + while q: + y, x = heapq.heappop(q)[1] + l = scipy.unique(w[y-1:y+2,x-1:x+2]) + l = l[l!=0] + if len(l)==1: + w[y, x] = l[0] + for ny, nx in scipy.argwhere(w[y-1:y+2,x-1:x+2]==0): + if (ny==1) and (nx==1): + continue + ny += y - 1 + nx += x - 1 + try: + p = (-d[ny,nx], (ny, nx)) + d[ny,nx] = 0 + if p[0]!=0 and not p in q: + heapq.heappush(q, p) + except IndexError, e: + print e + return w + + @Worker.plug(Connectors.TYPE_IMAGE) + def wsworker(self, image, markers, subscriber=0): + self._markers = markers.data + newdata = pile(self.watershed, image.data) + longname = "Watershed" + result = DataContainer.FieldContainer( + newdata, + copy.deepcopy(image.unit), + copy.deepcopy(image.error), + copy.deepcopy(image.mask), + copy.deepcopy(image.dimensions), + longname, + image.shortname, + copy.deepcopy(image.attributes), + False) + result.seal() + 
#print newdata.shape + return result Modified: trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py 2009-07-21 11:42:02 UTC (rev 652) +++ trunk/src/workers/ImageProcessing/ImageProcessing/__init__.py 2009-07-21 13:41:23 UTC (rev 653) @@ -48,13 +48,20 @@ "DistanceMapper", "EdgeFillWorker", "EdgeTouchingFeatureRemover", + "EnhanceContrast", "FilterWorker", + "FindLocalExtrema", + "FitBackground", + "Gradient", "ImageLoaderWorker", "InvertWorker", "Medianiser", + "MeasureFocus", + "NDImageWorker", "SkeletonizeFeature", "ThresholdingWorker", "UltimatePointsCalculator", + "Watershed" ] def isFeature(point): This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-07-30 13:44:48
|
Revision: 655 http://pyphant.svn.sourceforge.net/pyphant/?rev=655&view=rev Author: zklaus Date: 2009-07-30 13:44:20 +0000 (Thu, 30 Jul 2009) Log Message: ----------- Merge branch 'master' into svn-trunk Modified Paths: -------------- trunk/src/pyphant/pyphant/core/DataContainer.py trunk/src/pyphant/pyphant/core/FieldContainer.py trunk/src/pyphant/pyphant/core/PyTablesPersister.py trunk/src/pyphant/pyphant/tests/TestDataContainer.py trunk/src/pyphant/pyphant/tests/TestPyTablesPersister.py trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py trunk/src/workers/OSC/OSC/convert2FMF.py trunk/src/workers/fmfile/fmfile/FMFWriter.py Added Paths: ----------- trunk/src/pyphant/pyphant/core/Helpers.py Modified: trunk/src/pyphant/pyphant/core/DataContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/DataContainer.py 2009-07-22 17:42:41 UTC (rev 654) +++ trunk/src/pyphant/pyphant/core/DataContainer.py 2009-07-30 13:44:20 UTC (rev 655) @@ -72,30 +72,15 @@ import os, platform, datetime, socket, urlparse from pyphant.quantities.PhysicalQuantities import (isPhysicalQuantity, PhysicalQuantity) +import Helpers - import logging _logger = logging.getLogger("pyphant") - #Default string encoding enc = lambda s: unicode(s, "utf-8") -#Set USER variable used for the emd5 tag -pltform = platform.system() -if pltform == 'Linux' or pltform == 'Darwin': - USER = enc(os.environ['LOGNAME']) -elif pltform == 'Windows': - try: - USER = enc(os.environ['USERNAME']) - except: - USER = u"Unidentified User" -else: - raise NotImplementedError, "Unsupported Platform %s" %pltform - - - def parseId(id): u"""Returns tupple (HASH, TYPESTRING) from given .id attribute.""" resUri = urlparse.urlsplit(id) @@ -123,6 +108,8 @@ def __init__(self, longname, shortname, attributes=None): self.longname = longname self.shortname = shortname + self.machine = Helpers.getMachine() + self.creator = Helpers.getUsername() if type(attributes) == type({}): 
self.attributes = attributes else: @@ -167,6 +154,16 @@ "be modified anymore.") self.lock.release() + def generateHash(self, m=None): + if m == None: + m = hashlib.md5() + m.update(self.longname) + m.update(self.shortname) + m.update(self.machine) + m.update(self.creator) + m.update(str(self.attributes)) + return enc(m.hexdigest()) + def seal(self, id=None): with self.lock: if self.id: @@ -178,10 +175,10 @@ self.id = id else: self.hash = self.generateHash() - now = enc(datetime.datetime.utcnow().isoformat('_')) - self.id = u"emd5://%s/%s/%s/%s.%s" % (enc(socket.getfqdn()), - USER, - now, + self.timestamp = datetime.datetime.utcnow() + self.id = u"emd5://%s/%s/%s/%s.%s" % (self.machine, + self.creator, + enc(self.timestamp.isoformat('_')), self.hash, self.typeString) @@ -232,12 +229,11 @@ return u"%s %s" % (self.longname, self.shortname) label = property(_getLabel) - def generateHash(self): - m = hashlib.md5() + def generateHash(self, m=None): + if m == None: + m = hashlib.md5() + super(SampleContainer, self).generateHash(m) m.update(u''.join([c.hash for c in self.columns])) - m.update(str(self.attributes)) - m.update(self.longname) - m.update(self.shortname) return enc(m.hexdigest()) def __deepcopy__(self, memo): @@ -246,6 +242,8 @@ res.columns = copy.deepcopy(self.columns, memo) res.longname = copy.deepcopy(self.longname, memo) res.shortname = copy.deepcopy(self.shortname, memo) + res.creator = copy.deepcopy(self.creator, memo) + res.machine = copy.deepcopy(self.machine, memo) res.attributes = copy.deepcopy(self.attributes, memo) self.lock.release() return res Modified: trunk/src/pyphant/pyphant/core/FieldContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/FieldContainer.py 2009-07-22 17:42:41 UTC (rev 654) +++ trunk/src/pyphant/pyphant/core/FieldContainer.py 2009-07-30 13:44:20 UTC (rev 655) @@ -276,17 +276,16 @@ self.lock.release() return res - def generateHash(self): - m = hashlib.md5() + def 
generateHash(self, m=None): + if m == None: + m = hashlib.md5() + super(FieldContainer, self).generateHash(m) m.update(str(self.data.tolist())) m.update(str(self.unit)) if self.error!=None: m.update(str(self.error.tolist())) if self.mask!=None: m.update(str(self.mask.tolist())) - m.update(str(self.attributes)) - m.update(self.longname.encode('utf-8')) - m.update(self.shortname.encode('utf-8')) [m.update(dim.hash) for dim in self._dimensions] return enc(m.hexdigest()) Added: trunk/src/pyphant/pyphant/core/Helpers.py =================================================================== --- trunk/src/pyphant/pyphant/core/Helpers.py (rev 0) +++ trunk/src/pyphant/pyphant/core/Helpers.py 2009-07-30 13:44:20 UTC (rev 655) @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2008, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import os, os.path + +def getPyphantPath(subdir = '/'): + """ + returns full pyphant path with optional subdirectory + subdir -- subdirectory that is created if it does not exist already, + recursive creation of directories is supported also. + """ + homedir = os.path.expanduser('~') + if not subdir.startswith('/'): + subdir = '/' + subdir + if not subdir.endswith('/'): + subdir = subdir + '/' + if homedir == '~': + homedir = os.getcwdu() + plist = ('/.pyphant' + subdir).split('/') + makedir = homedir + path = homedir + '/.pyphant' + subdir + for p in plist: + if p != '': + makedir += "/%s" % (p, ) + if not os.path.isdir(makedir): + os.mkdir(makedir) + return path + +def getUsername(): + enc = lambda s: unicode(s, "utf-8") + import platform + pltform = platform.system() + if pltform == 'Linux' or pltform == 'Darwin': + user = enc(os.environ['LOGNAME']) + elif pltform == 'Windows': + try: + user = enc(os.environ['USERNAME']) + except: + user = u"Unidentified User" + else: + raise NotImplementedError, "Unsupported Platform %s" %pltform + return user + +def getMachine(): + import socket + return unicode(socket.getfqdn(), 'utf-8') + +def enableLogging(): + """ + Enables logging to stdout for debug purposes. 
+ """ + l = logging.getLogger("pyphant") + l.setLevel(logging.DEBUG) + f = logging.Formatter('%(asctime)s [%(name)s|%(levelname)s] %(message)s') + h = logging.StreamHandler(sys.stderr) + h.setFormatter(f) + l.addHandler(h) + l.info("Logger 'pyphant' has been configured for debug purposes.") Modified: trunk/src/pyphant/pyphant/core/PyTablesPersister.py =================================================================== --- trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2009-07-22 17:42:41 UTC (rev 654) +++ trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2009-07-30 13:44:20 UTC (rev 655) @@ -70,7 +70,7 @@ import logging _logger = logging.getLogger("pyphant") -_reservedAttributes = ('longname','shortname','columns') +_reservedAttributes = ('longname', 'shortname', 'columns', 'creator', 'machine') class Connection(tables.IsDescription): destinationWorker = tables.StringCol(len("worker_"+str(sys.maxint))+1) @@ -167,6 +167,8 @@ def saveSample(h5, resultGroup, result): h5.setNodeAttr(resultGroup, "longname", result.longname.encode("utf-8")) h5.setNodeAttr(resultGroup, "shortname", result.shortname.encode("utf-8")) + h5.setNodeAttr(resultGroup, "creator", result.creator.encode("utf-8")) + h5.setNodeAttr(resultGroup, "machine", result.machine.encode("utf-8")) for key,value in result.attributes.iteritems(): if key in _reservedAttributes: raise ValueError, "Attribute should not be named %s!" 
% _reservedAttributes @@ -197,6 +199,8 @@ h5.setNodeAttr(resultGroup.data,key,value) h5.setNodeAttr(resultGroup, "longname", result.longname.encode("utf-8")) h5.setNodeAttr(resultGroup, "shortname", result.shortname.encode("utf-8")) + h5.setNodeAttr(resultGroup, "creator", result.creator.encode("utf-8")) + h5.setNodeAttr(resultGroup, "machine", result.machine.encode("utf-8")) if result.error != None: h5.createArray(resultGroup, "error", result.error, @@ -284,6 +288,13 @@ def loadField(h5, resNode): longname = unicode(h5.getNodeAttr(resNode, "longname"), 'utf-8') shortname = unicode(h5.getNodeAttr(resNode, "shortname"), 'utf-8') + try: + creator = unicode(h5.getNodeAttr(resNode, "creator"), 'utf-8') + machine = unicode(h5.getNodeAttr(resNode, "machine"), 'utf-8') + except: + import Helpers + creator = Helpers.getUsername() + machine = Helpers.getMachine() data = scipy.array(resNode.data.read()) def loads(inputList): if type(inputList)==type([]): @@ -316,6 +327,8 @@ result = DataContainer.FieldContainer(data, unit, error, mask, dimensions, longname, shortname, attributes) + result.creator = creator + result.machine = machine result.seal(resNode._v_title) return result @@ -323,6 +336,8 @@ result = DataContainer.SampleContainer.__new__(DataContainer.SampleContainer) result.longname = unicode(h5.getNodeAttr(resNode, "longname"), 'utf-8') result.shortname = unicode(h5.getNodeAttr(resNode, "shortname"), 'utf-8') + result.creator = unicode(h5.getNodeAttr(resNode, "creator"), 'utf-8') + result.machine = unicode(h5.getNodeAttr(resNode, "machine"), 'utf-8') result.attributes = {} for key in resNode._v_attrs._v_attrnamesuser: if key not in _reservedAttributes: Modified: trunk/src/pyphant/pyphant/tests/TestDataContainer.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestDataContainer.py 2009-07-22 17:42:41 UTC (rev 654) +++ trunk/src/pyphant/pyphant/tests/TestDataContainer.py 2009-07-30 13:44:20 UTC (rev 655) @@ 
-560,15 +560,15 @@ except TypeError, e: pass else: - self.fail("Modification of sealed FieldContainer was not \ -prohibited.") + self.fail("Modification of sealed FieldContainer was not " + "prohibited.") try: sample['i'].data[0] = 4 except RuntimeError, e: pass else: - self.fail("Modification of sealed FieldContainer was not \ -prohibited.") + self.fail("Modification of sealed FieldContainer was not " + "prohibited.") def testSingleSample(self): #string = numpy.rec.fromrecords([(s,) for s in [u'Hello',u'World!',u'Bäh!']]) @@ -745,8 +745,8 @@ [True, True, False, True, True]) def testPrecedence2dExpression(self): - self._compareExpected('0m > "l" or not ("t" == 20s or "t" == 40s) and \ -(("l" == -20000m or "t" == 40s) or "l" == 5500m)', + self._compareExpected('0m > "l" or not ("t" == 20s or "t" == 40s) and ' + '(("l" == -20000m or "t" == 40s) or "l" == 5500m)', [True, False, False, False, True]) def testNestedTuple2dExpression(self): @@ -792,8 +792,8 @@ rescale=True) nt.assert_array_equal(self.testData, field.data, - "Rescale option shouldn't do anything for \ -unitless fields.") + "Rescale option shouldn't do anything for " + "unitless fields.") def testRescaleBaseUnitsFloats(self): field = FieldContainer(copy.deepcopy(self.testData).astype('f'), @@ -855,8 +855,8 @@ rescale=True) self.assertEqual(field.unit, self.unit, - "An integer field should not be rescaled, but should \ -hold the normation constant in its unit.") + "An integer field should not be rescaled, but should " + "hold the normation constant in its unit.") def testDimensionsRescaling(self): """ Modified: trunk/src/pyphant/pyphant/tests/TestPyTablesPersister.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestPyTablesPersister.py 2009-07-22 17:42:41 UTC (rev 654) +++ trunk/src/pyphant/pyphant/tests/TestPyTablesPersister.py 2009-07-30 13:44:20 UTC (rev 655) @@ -45,8 +45,10 @@ import scipy import copy, datetime from 
pyphant.quantities.PhysicalQuantities import PhysicalQuantity -from pyphant.core.DataContainer import FieldContainer,SampleContainer,assertEqual -from pyphant.core.PyTablesPersister import saveField,loadField,saveSample,loadSample,saveExecutionOrder,loadExecutionOrders +from pyphant.core.DataContainer import FieldContainer, SampleContainer, assertEqual +from pyphant.core.PyTablesPersister import (saveField, loadField, saveSample, + loadSample, saveExecutionOrder, + loadExecutionOrders) import numpy.testing as nt import numpy import tables @@ -74,6 +76,7 @@ class FieldContainerTestCase(ContainerTestCase): def testSaveRestore(self): + self.field.creator=u"Klaus" self.field.seal() self.eln.createGroup(self.eln.root,'testSaveRestoreField') saveField(self.eln,self.eln.root.testSaveRestoreField,self.field) Modified: trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py =================================================================== --- trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py 2009-07-22 17:42:41 UTC (rev 654) +++ trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py 2009-07-30 13:44:20 UTC (rev 655) @@ -48,7 +48,8 @@ from pyphant.core.Connectors import TYPE_IMAGE from pyphant.wxgui2.DataVisReg import DataVisReg from pyphant.quantities.PhysicalQuantities import isPhysicalQuantity -from NonUniformImage import NonUniformImage +#from NonUniformImage import NonUniformImage +from matplotlib.image import NonUniformImage class F(pylab.Formatter): def __init__(self, container, *args, **kwargs): @@ -75,15 +76,17 @@ yc=event.ydata xi = numpy.abs(x.data-xc).argmin() yi = numpy.abs(y.data-yc).argmin() - if (self.fieldContainer.mask != None) and self.fieldContainer.mask[yi, xi]: + if ((self.fieldContainer.mask != None) + and self.fieldContainer.mask[yi, xi]): val = "n/a" else: try: - val = self.fieldContainer.data[yi, xi]*self.fieldContainer.unit + val = self.fieldContainer.data[yi, xi] + val *= self.fieldContainer.unit except IndexError: val = "nan" - xval = 
xc*x.unit - yval = yc*y.unit + xval = xc * x.unit + yval = yc * y.unit def format(val): if not isPhysicalQuantity(val): if type(val) in (type(' '),type(u' ')): @@ -95,7 +98,8 @@ return valstr labels = map(format,[xval,yval,val]) labels.insert(0,zLabel) - self.figure.canvas.toolbar.set_message("%s(%s,%s) = %s" % tuple(labels)) + self.figure.canvas.toolbar.set_message("%s(%s,%s) = %s" + % tuple(labels)) else: self.figure.canvas.toolbar.set_message("outside axis") @@ -109,7 +113,8 @@ xmax=scipy.amax(x) ymin=scipy.amin(y) ymax=scipy.amax(y) - #Support for images with non uniform axes adapted from python-matplotlib-doc/examples/pcolor_nonuniform.py + #Support for images with non uniform axes adapted + #from python-matplotlib-doc/examples/pcolor_nonuniform.py ax = self.figure.add_subplot(111) vmin = self.fieldContainer.attributes.get('vmin', None) vmax = self.fieldContainer.attributes.get('vmax', None) Modified: trunk/src/workers/OSC/OSC/convert2FMF.py =================================================================== --- trunk/src/workers/OSC/OSC/convert2FMF.py 2009-07-22 17:42:41 UTC (rev 654) +++ trunk/src/workers/OSC/OSC/convert2FMF.py 2009-07-30 13:44:20 UTC (rev 655) @@ -94,18 +94,8 @@ import StringIO from optparse import OptionParser import platform,os +import pyphant.core.Helpers -#Set USER variable -pltform=platform.system() -if pltform=='Linux': - import pwd - USER=pwd.getpwuid(os.getuid())[0] -elif pltform=='Windows': - try: - USER=os.environ['USERNAME'] - except: - USER=u"Unidentified User" - parser= OptionParser(__doc__) parser.add_option("-i", "--input-format", dest="iniFormat",default='RiedeINI', help="FORMAT of input data (default is RiedeINI)", metavar="FORMAT", @@ -150,7 +140,7 @@ def annotation(): stream = '' stream += 'patched date: %s\n' % modDate - stream += 'patched by: %s\n' % USER + stream += 'patched by: %s\n' % pyphant.core.Helpers.getUsername() stream += 'place: Freiburg i. 
Brsg.\n' stream += 'organization: Freiburger Materialforschungszentrum (FMF)\n' return stream Modified: trunk/src/workers/fmfile/fmfile/FMFWriter.py =================================================================== --- trunk/src/workers/fmfile/fmfile/FMFWriter.py 2009-07-22 17:42:41 UTC (rev 654) +++ trunk/src/workers/fmfile/fmfile/FMFWriter.py 2009-07-30 13:44:20 UTC (rev 655) @@ -40,18 +40,8 @@ enc=lambda s: unicode(s, "utf-8") import platform,os,socket,datetime -pltform=platform.system() -if pltform=='Linux' or pltform=='Darwin': - USER=enc(os.environ['LOGNAME']) -elif pltform=='Windows': - try: - USER=enc(os.environ['USERNAME']) - except: - USER=u"Unidentified User" -else: - raise NotImplementedError, "Unsupported Platform %s" %pltform - import fmfgen, numpy +import pyphant.core.Helpers def dtype2colFormat(dtype): if dtype.name.startswith('float'): @@ -65,7 +55,7 @@ def field2fmf(fieldContainer): factory = fmfgen.gen_factory(out_coding='utf-8', eol='\n') fc = factory.gen_fmf() - fc.add_reference_item('author', USER) + fc.add_reference_item('author', pyphant.core.Helpers.getUsername()) fc.add_reference_item('title',fieldContainer.longname) fc.add_reference_item('place',socket.getfqdn()) fc.add_reference_item('created',datetime.datetime.utcnow().isoformat()) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2009-09-28 10:18:25
|
Revision: 658 http://pyphant.svn.sourceforge.net/pyphant/?rev=658&view=rev Author: zklaus Date: 2009-09-28 10:18:07 +0000 (Mon, 28 Sep 2009) Log Message: ----------- Merge branch 'master' into svn-trunk * master: (64 commits) Fix: Fixed 'too many open files' bug Fix: Fixed bug in MeasureFocus and AutoFocus workers Enh: Added distance_trafo to ndimage worker Cosm: Removed dispensable includes Fix: Fixed types in FitBackground worker Enh: Added temporary batch workers Enh: Added unicode conversion tools to Helpers.py Enh: Autofocus Enh: Added KM visualizer Enh: Added switch for WebInterface to wxPyphant Fix: This patch is a collection of general bugfixes Enh: Replaced BatchWorker by BatchHead and BatchTail Enh: Added unicode conversion methods to Helpers.py Fix: Fixed bug in BatchWorker Fix: Fixed bug in KM cache Fix: Fixed possible source of error in FocusSlice Enh: Added test for KM cache (test fails) Cosm: Reduced cache size in KM to 10 Fix: Saving of large FocusSlices now possible Enh: Added test for saving large FocusSlice objects ... 
Modified Paths: -------------- trunk/src/pyphant/pyphant/core/H5FileHandler.py trunk/src/pyphant/pyphant/core/Helpers.py trunk/src/pyphant/pyphant/core/KnowledgeManager.py trunk/src/pyphant/pyphant/core/PyTablesPersister.py trunk/src/pyphant/pyphant/core/WebInterface.py trunk/src/pyphant/pyphant/tests/TestH5FileHandler.py trunk/src/pyphant/pyphant/tests/TestKnowledgeManager.py trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py trunk/src/pyphant/pyphant/visualizers/__init__.py trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py trunk/src/workers/ImageProcessing/ImageProcessing/FitBackground.py trunk/src/workers/ImageProcessing/ImageProcessing/Gradient.py trunk/src/workers/ImageProcessing/ImageProcessing/MeasureFocus.py trunk/src/workers/ImageProcessing/ImageProcessing/NDImageWorker.py trunk/src/workers/ImageProcessing/ImageProcessing/Watershed.py trunk/src/workers/OSC/OSC/OscAbsorption.py trunk/src/workers/tools/tools/Emd5Src.py trunk/src/workers/tools/tools/__init__.py Added Paths: ----------- trunk/src/pyphant/pyphant/tests/TestAutoFocus.py trunk/src/pyphant/pyphant/visualizers/KMVisualizer.py trunk/src/workers/ImageProcessing/ImageProcessing/AutoFocus.py trunk/src/workers/tools/tools/BatchExtractor.py trunk/src/workers/tools/tools/BatchHead.py trunk/src/workers/tools/tools/BatchTail.py trunk/src/workers/tools/tools/ParameterRun.py Modified: trunk/src/pyphant/pyphant/core/H5FileHandler.py =================================================================== --- trunk/src/pyphant/pyphant/core/H5FileHandler.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/pyphant/pyphant/core/H5FileHandler.py 2009-09-28 10:18:07 UTC (rev 658) @@ -37,14 +37,14 @@ __version__ = "$Revision$" # $Source$: import tables -from pyphant.core import (DataContainer, FieldContainer) +from pyphant.core import DataContainer from tables import StringCol from pyphant.quantities.PhysicalQuantities import PhysicalQuantity import scipy import logging import os +from pyphant.core import 
PyTablesPersister _logger = logging.getLogger("pyphant") -_reservedAttributes = ('longname', 'shortname', 'columns') class H5FileHandler(object): @@ -66,8 +66,22 @@ raise IOError("File '%s' does not exist!"%(filename, )) self.filename = filename self.mode = mode + if mode == 'w': + tmphandle = tables.openFile(self.filename, 'w') + tmphandle.close() + self.mode = 'a' + self.handle = None + + def __enter__(self): + assert self.handle == None self.handle = tables.openFile(self.filename, self.mode) + return self + def __exit__(self, type, value, traceback): + if self.handle != None: + self.handle.close() + self.handle = None + def getNodeAndTypeFromId(self, dcId): """ Returns a tuple (HDF5 node, uriType) for the given @@ -109,46 +123,7 @@ instance. This method is intended for internal use only. resNode -- node at which the FieldContainer is located in the file. """ - longname = unicode(self.handle.getNodeAttr(resNode, "longname"), - 'utf-8') - shortname = unicode(self.handle.getNodeAttr(resNode, "shortname"), - 'utf-8') - data = scipy.array(resNode.data.read()) - def loads(inputList): - if type(inputList) == type([]): - try: - return map(lambda s: eval(s), inputList) - except: - return map(lambda s: unicode(s, 'utf-8'), inputList) - else: - return map(loads, inputList) - if data.dtype.char == 'S': - data = scipy.array(loads(data.tolist())) - attributes = {} - for key in resNode.data._v_attrs._v_attrnamesuser: - attributes[key] = self.handle.getNodeAttr(resNode.data, key) - try: - error = scipy.array(resNode.error.read()) - except tables.NoSuchNodeError: - error = None - try: - mask = scipy.array(resNode.mask.read()) - except tables.NoSuchNodeError: - mask = None - unit = eval(unicode(self.handle.getNodeAttr(resNode, "unit"), 'utf-8')) - try: - dimTable = resNode.dimensions - dimensions = [self.loadField(self.handle.getNode( - "/results/result_"\ - + DataContainer.parseId(row['id'])[0])) - for row in dimTable.iterrows()] - except tables.NoSuchNodeError: - dimensions = 
FieldContainer.INDEX - result = DataContainer.FieldContainer(data, unit, error, mask, - dimensions, longname, shortname, - attributes) - result.seal(resNode._v_title) - return result + return PyTablesPersister.loadField(self.handle, resNode) def loadSample(self, resNode): """ @@ -156,33 +131,7 @@ instance. This method is intended for internal use only. resNode -- node at which the SampleContainer is located in the file. """ - result = DataContainer.SampleContainer.__new__( - DataContainer.SampleContainer) - result.longname = unicode(self.handle.getNodeAttr(resNode, "longname"), - 'utf-8') - result.shortname = unicode(self.handle.getNodeAttr(resNode, - "shortname"), - 'utf-8') - result.attributes = {} - for key in resNode._v_attrs._v_attrnamesuser: - if key not in _reservedAttributes: - result.attributes[key] = self.handle.getNodeAttr(resNode, key) - columns = [] - for resId in self.handle.getNodeAttr(resNode, "columns"): - nodename = "/results/" + resId - dcHash, uriType = DataContainer.parseId(self.handle.getNodeAttr( - nodename, "TITLE")) - if uriType == 'sample': - loader = self.loadSample - elif uriType == 'field': - loader = self.loadField - else: - raise KeyError, "Unknown UriType %s in saving result %s."\ - % (uriType, result.id) - columns.append(loader(self.handle.getNode(nodename))) - result.columns = columns - result.seal(resNode._v_title) - return result + return PyTablesPersister.loadSample(self.handle, resNode) def loadSummary(self, dcId = None): """ @@ -202,14 +151,18 @@ summary['id'] = dcId resNode, uriType = self.getNodeAndTypeFromId(dcId) summary['longname'] = unicode(self.handle.getNodeAttr(resNode, - "longname"), - 'utf-8') + "longname"), 'utf-8') summary['shortname'] = unicode(self.handle.getNodeAttr(resNode, - "shortname"), - 'utf-8') + "shortname"), 'utf-8') emd5_split = dcId.split('/') - summary['host'] = unicode(emd5_split[2], 'utf-8') - summary['user'] = unicode(emd5_split[3], 'utf-8') + try: + summary['machine'] = 
unicode(self.handle.getNodeAttr(resNode, + "machine"), 'utf-8') + summary['creator'] = unicode(self.handle.getNodeAttr(resNode, + "creator"), 'utf-8') + except: + summary['machine'] = unicode(emd5_split[2], 'utf-8') + summary['creator'] = unicode(emd5_split[3], 'utf-8') summary['date'] = unicode(emd5_split[4], 'utf-8') summary['hash'] = emd5_split[5].split('.')[0] summary['type'] = unicode(emd5_split[5].split('.')[1], 'utf-8') @@ -240,7 +193,7 @@ summary['dimensions'] = dimensions elif uriType == 'sample': for key in resNode._v_attrs._v_attrnamesuser: - if key not in _reservedAttributes: + if key not in PyTablesPersister._reservedAttributes: attributes[key] = self.handle.getNodeAttr(resNode, key) columns = [] for resId in self.handle.getNodeAttr(resNode, "columns"): @@ -289,20 +242,7 @@ in the file. result -- SampleContainer instance to be saved """ - self.handle.setNodeAttr(resultGroup, "longname", - result.longname.encode("utf-8")) - self.handle.setNodeAttr(resultGroup, "shortname", - result.shortname.encode("utf-8")) - for key, value in result.attributes.iteritems(): - if key in _reservedAttributes: - raise ValueError, "Attribute should not be named %s!"\ - % _reservedAttributes - self.handle.setNodeAttr(resultGroup, key, value) - #Store fields of sample Container and gather list of field IDs - columns = [] - for column in result.columns: - columns.append(self.saveDataContainer(column)) - self.handle.setNodeAttr(resultGroup, "columns", columns) + PyTablesPersister.saveSample(self.handle, resultGroup, result) def saveField(self, resultGroup, result): """ @@ -312,59 +252,4 @@ in the file. 
result -- FieldContainer instance to be saved """ - def dump(inputList): - def conversion(arg): - if type(arg) == type(u' '): - return arg.encode('utf-8') - else: - return arg.__repr__() - if type(inputList) == type([]): - return map(conversion, inputList) - else: - return map(dump, inputList) - if result.data.dtype.char in ['U', 'O']: - unicodeData = scipy.array(dump(result.data.tolist())) - self.handle.createArray(resultGroup, "data", unicodeData, - result.longname.encode("utf-8")) - else: - self.handle.createArray(resultGroup, "data", result.data, - result.longname.encode("utf-8")) - for key, value in result.attributes.iteritems(): - self.handle.setNodeAttr(resultGroup.data, key, value) - self.handle.setNodeAttr(resultGroup, "longname", - result.longname.encode("utf-8")) - self.handle.setNodeAttr(resultGroup, "shortname", - result.shortname.encode("utf-8")) - if result.error != None: - self.handle.createArray(resultGroup, "error", result.error, - (u"Error of " + result.longname).encode("utf-8")) - if result.mask != None: - self.handle.createArray(resultGroup, "mask", result.mask, - (u"Mask of "+result.longname).encode("utf-8")) - self.handle.setNodeAttr(resultGroup, "unit", - repr(result.unit).encode("utf-8")) - if result.dimensions != FieldContainer.INDEX: - idLen = max([len(dim.id.encode("utf-8")) - for dim in result.dimensions]) - dimTable = self.handle.createTable(resultGroup, "dimensions", - {"hash":StringCol(32), - "id":StringCol(idLen)}, - (u"Dimensions of "\ - + result.longname).encode( - "utf-8"), - expectedrows =\ - len(result.dimensions)) - for dim in result.dimensions: - d = dimTable.row - d["hash"] = dim.hash.encode("utf-8") - d["id"] = dim.id.encode("utf-8") - d.append() - self.saveDataContainer(dim) - dimTable.flush() - - def __del__(self): - """ - Closes the HDF5 file. 
- """ - if hasattr(self, 'handle'): - self.handle.close() + PyTablesPersister.saveField(self.handle, resultGroup, result) Modified: trunk/src/pyphant/pyphant/core/Helpers.py =================================================================== --- trunk/src/pyphant/pyphant/core/Helpers.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/pyphant/pyphant/core/Helpers.py 2009-09-28 10:18:07 UTC (rev 658) @@ -29,14 +29,13 @@ # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -import os, os.path - def getPyphantPath(subdir = '/'): """ returns full pyphant path with optional subdirectory subdir -- subdirectory that is created if it does not exist already, recursive creation of directories is supported also. """ + import os homedir = os.path.expanduser('~') if not subdir.startswith('/'): subdir = '/' + subdir @@ -55,6 +54,7 @@ return path def getUsername(): + import os enc = lambda s: unicode(s, "utf-8") import platform pltform = platform.system() @@ -77,6 +77,8 @@ """ Enables logging to stdout for debug purposes. """ + import logging + import sys l = logging.getLogger("pyphant") l.setLevel(logging.DEBUG) f = logging.Formatter('%(asctime)s [%(name)s|%(levelname)s] %(message)s') @@ -84,3 +86,33 @@ h.setFormatter(f) l.addHandler(h) l.info("Logger 'pyphant' has been configured for debug purposes.") + +def uc2utf8(stype): + """ + Returns utf-8 encoded version of stype, if stype was unicode, else + stype is returned. + If stype is of ListType, the above applies for all entries of the list. + """ + from types import UnicodeType, ListType + def convert(arg): + if isinstance(arg, UnicodeType): + return arg.encode('utf-8') + return arg + if isinstance(stype, ListType): + return map(convert, stype) + return convert(stype) + +def utf82uc(stype): + """ + Returns a unicode object created from a utf-8 encoded string. + If the input was unicode, it is returned unchanged. 
+ List are treated similar to uc2utf8, see docstring there. + """ + from types import StringType, ListType + def convert(arg): + if isinstance(arg, StringType): + return unicode(arg, 'utf') + return arg + if isinstance(stype, ListType): + return map(convert, stype) + return convert(stype) Modified: trunk/src/pyphant/pyphant/core/KnowledgeManager.py =================================================================== --- trunk/src/pyphant/pyphant/core/KnowledgeManager.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/pyphant/pyphant/core/KnowledgeManager.py 2009-09-28 10:18:07 UTC (rev 658) @@ -28,6 +28,7 @@ # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +from __future__ import with_statement """ This module provides the KnowledgeManager class as well as some helper methods @@ -65,7 +66,7 @@ HTTP_REQUEST_KM_ID_PATH = "/request_km_id" HTTP_REQUEST_DC_DETAILS_PATH = "/request_dc_details?dcid=" # Maximum number of DCs to store in cache: -CACHE_SIZE = 50 +CACHE_SIZE = 10 # Timeout for cached DCs in seconds: CACHE_TIMEOUT = 3600 # Number of hits a DC has to have at least in order to be cached: @@ -193,8 +194,6 @@ """ if self.isServerRunning(): self.stopServer() - for handler in self.H5FileHandlers.itervalues(): - handler.__del__() def registerH5(self, filename, mode = 'a', registerContents = True): """ @@ -242,7 +241,8 @@ filename -- path to the HDF5 file """ h5fh = self.getH5FileHandler(filename) - summaryDict = h5fh.loadSummary() + with h5fh: + summaryDict = h5fh.loadSummary() for dcId, summary in summaryDict.items(): if not self._storage.has_key(dcId): self._storage[dcId] = {'lasthit':None, @@ -469,7 +469,8 @@ filename = getFilenameFromDcId(dc.id) self.registerH5(filename, 'w', False) handler = self.getH5FileHandler(filename) - handler.saveDataContainer(dc) + with handler: + handler.saveDataContainer(dc) 
self.refreshH5(filename) def registerFMF(self, filename): @@ -567,8 +568,8 @@ dir = self._http_dir) os.close(osFileId) handler = H5FileHandler(filename, 'w') - handler.saveDataContainer(dc) - del handler + with handler: + handler.saveDataContainer(dc) dc_url = self._getServerURL() + "/" + os.path.basename(filename) else: try: @@ -607,18 +608,19 @@ dc_id) if not found_in_cache: try: - dc = dcinfo['filehandler'].loadDataContainer(dc_id) + handler = dcinfo['filehandler'] + with handler: + dc = handler.loadDataContainer(dc_id) except Exception, excep: raise KnowledgeManagerException("DC ID '%s' known, but " - "cannot be read from file " - "'%s'." % (dc_id, - localfilename), - excep) + "cannot be read" + "." % (dc_id, ), excep) if use_cache and dcinfo['hitcount'] >= CACHE_THRESHHOLD: docache = False if len(self._cache) >= CACHE_SIZE: minhitcount = sys.maxint - for cachedid, cacheddcinfo in self._cache.iteritems(): + for cachedid in self._cache.keys(): + cacheddcinfo = self._storage[cachedid] if (now - cacheddcinfo['lasthit']) >= CACHE_TIMEOUT: cacheddcinfo['hitcount'] = 0 self._cache.pop(cachedid) @@ -643,7 +645,8 @@ filename = getFilenameFromDcId(dc_id) urllib.urlretrieve(dc_url, filename) self.registerH5(filename) - dc = self.H5FileHandlers[filename].loadDataContainer(dc_id) + with self.H5FileHandlers[filename] as handler: + dc = handler.loadDataContainer(dc_id) else: raise KnowledgeManagerException("DC ID '%s' is unknown." 
% (dc_id, )) Modified: trunk/src/pyphant/pyphant/core/PyTablesPersister.py =================================================================== --- trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2009-09-28 10:18:07 UTC (rev 658) @@ -66,6 +66,7 @@ from pyphant.core import (CompositeWorker, DataContainer) from tables import StringCol, Col from pyphant.quantities.PhysicalQuantities import PhysicalQuantity +from ImageProcessing.AutoFocus import FocusSlice # For loading FCs... import scipy import logging _logger = logging.getLogger("pyphant") Modified: trunk/src/pyphant/pyphant/core/WebInterface.py =================================================================== --- trunk/src/pyphant/pyphant/core/WebInterface.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/pyphant/pyphant/core/WebInterface.py 2009-09-28 10:18:07 UTC (rev 658) @@ -357,7 +357,7 @@ """ if self.disabled: return self.get_disabled() - findrows = [['type', 'longname', 'shortname', 'user', 'host']] + findrows = [['type', 'longname', 'shortname', 'creator', 'machine']] if path.startswith('/?'): query = parse_qs(path[2:]) else: @@ -418,7 +418,7 @@ if dateto != None: dateto = dateto.groups() resrows = [['details', 'type', 'longname', 'shortname', - 'user', 'host', 'date', 'data']] + 'creator', 'machine', 'date', 'data']] for summary in summarydict.itervalues(): add = True for key in findrows[0]: @@ -455,8 +455,8 @@ summary['type'], summary['longname'], summary['shortname'], - summary['user'], - summary['host'], + summary['creator'], + summary['machine'], summary['date'], HTMLDnldForm(summary['id'])] resrows.append(row) Added: trunk/src/pyphant/pyphant/tests/TestAutoFocus.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestAutoFocus.py (rev 0) +++ trunk/src/pyphant/pyphant/tests/TestAutoFocus.py 2009-09-28 10:18:07 UTC (rev 658) @@ -0,0 +1,206 @@ 
+#!/usr/bin/env python2.5 +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2009, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +u"""Provides unittest classes for AutoFocus worker. 
+""" + +__id__ = "$Id$".replace('$','') +__author__ = "$Author$".replace('$','') +__version__ = "$Revision$".replace('$','') +# $Source$ + +import unittest +import pkg_resources +pkg_resources.require("pyphant") +from pyphant.quantities.PhysicalQuantities import PhysicalQuantity +from pyphant.core.DataContainer import FieldContainer, SampleContainer +from ImageProcessing import AutoFocus as AF +import numpy + + +class CubeTestCase(unittest.TestCase): + def setUp(self): + self.cube1 = AF.Cube([slice(0, 10), + slice(0, 10), + slice(0, 10)]) + self.cube2 = AF.Cube([slice(3, 5), + slice(4, 6), + slice(-5, 7)]) + def tearDown(self): + pass + + def testEq(self): + cube1c = AF.Cube(self.cube1.slices) + assert self.cube1 == cube1c + assert not self.cube1.__eq__(self.cube2) + + def testAnd(self): + expected = AF.Cube([slice(3, 5), slice(4, 6), slice(0, 7)]) + assert self.cube1 & self.cube2 == expected + + def testOr(self): + expected = AF.Cube([slice(0, 10), slice(0, 10), slice(-5, 10)]) + assert self.cube1 | self.cube2 == expected + + def testVolume(self): + assert self.cube1.getVolume() == 1000 + assert self.cube2.getVolume() == 48 + assert AF.Cube([slice(0, 0), + slice(0, 1000), + slice(0, 1000)]).getVolume() == 0 + + def testSubCube(self): + expected = AF.Cube([slice(3,5 ), slice(-5, 7)]) + assert self.cube2.getSubCube([0, 2]) == expected + + def testGetEdgeLength(self): + assert self.cube2.getEdgeLength(0) == 2 + assert self.cube2.getEdgeLength(1) == 2 + assert self.cube2.getEdgeLength(2) == 12 + + def testSub(self): + expected = AF.Cube([slice(-3, 7), + slice(-4, 6), + slice(5, 15)]) + assert self.cube1 - self.cube2 == expected + + def testCenter(self): + expected = (5.0, 5.0, 5.0) + assert self.cube1.getCenter() == expected + + +class ZTubeTestCase(unittest.TestCase): + def setUp(self): + slices = [slice(0, 10), slice(0, 10)] + mask = numpy.ones((10, 10), dtype=bool) + fslice = AF.FocusSlice(slices, 10.0, mask) + self.ztube = AF.ZTube(fslice, 0, 1, 0.5, 0.5) + 
testslices1 = [slice(3, 12), slice(2, 9)] + mask1 = numpy.ones((9, 7), dtype=bool) + self.testfslice1 = AF.FocusSlice(testslices1, 12.0, mask1) + testslices2 = [slice(7, 17), slice(8, 16)] + mask2 = numpy.ones((10, 8), dtype=bool) + self.testfslice2 = AF.FocusSlice(testslices2, 8.0, mask2) + + def tearDown(self): + pass + + def testMatching(self): + assert self.ztube.match(self.testfslice1, 1) + assert not self.ztube.match(self.testfslice2, 1.01) + expectedyx = AF.Cube([slice(0, 12), + slice(0, 10)]) + expectedz = AF.Cube([slice(-1, 2)]) + assert self.ztube.yxCube == expectedyx + assert self.ztube.zCube == expectedz + assert self.ztube.focusedFSlice == self.testfslice1 + assert self.ztube.focusedZ == 1 + + +class AutoFocusTestCase(unittest.TestCase): + def setUp(self): + from pyphant.core.KnowledgeManager import KnowledgeManager + km = KnowledgeManager.getInstance() + sl1 = [slice(PhysicalQuantity('1.0mm'), + PhysicalQuantity('2.0mm')), + slice(PhysicalQuantity('1.5mm'), + PhysicalQuantity('3.5mm'))] + sl2 = [slice(PhysicalQuantity('0.8mm'), + PhysicalQuantity('1.9mm')), + slice(PhysicalQuantity('1.7mm'), + PhysicalQuantity('3.4mm'))] + mask1 = numpy.ones((10, 20), dtype=bool) + mask2 = numpy.ones((11, 17), dtype=bool) + fsl1 = AF.FocusSlice(sl1, PhysicalQuantity('10.0mm**-3'), mask1) + self.fsl2 = AF.FocusSlice(sl2, PhysicalQuantity('12.0mm**-3'), mask2) + fc1 = FieldContainer(numpy.array([fsl1])) + fc2 = FieldContainer(numpy.array([self.fsl2])) + fc1.seal() + fc2.seal() + km.registerDataContainer(fc1) + km.registerDataContainer(fc2) + columns = [FieldContainer(numpy.array([.5, 1.0]), + unit=PhysicalQuantity('1.0mm'), + longname='z-value'), + FieldContainer(numpy.array([fc1.id, fc2.id]), + longname="emd5")] + attributes = {u'ztol': PhysicalQuantity('0.5mm')} + self.inputSC = SampleContainer(columns, attributes=attributes) + self.inputSC.seal() + + def tearDown(self): + pass + + def testAutofocus(self): + columns = AF.autofocus(self.inputSC, 0.5, 0.75) + 
inclusionSC = SampleContainer(columns, + "AutoFocus") + for fc in inclusionSC.columns: + assert fc.data.shape == (1, ) + zfc, yfc, xfc, dfc, ffc = inclusionSC.columns + assert zfc.data[0] * zfc.unit == PhysicalQuantity('1.0mm') + assert (yfc.data[0] * yfc.unit, + xfc.data[0] * xfc.unit) == self.fsl2.getCenter() + assert ffc.data[0] * ffc.unit == PhysicalQuantity('12.0mm**-3') + + +class FocusSliceTestCase(unittest.TestCase): + def setUp(self): + pass + + def tearDown(self): + pass + + def testSaveLoadFocusSlice(self): + mask = numpy.ones((100, 150), dtype=bool) + slices = [slice(PhysicalQuantity('100mm'), + PhysicalQuantity('200mm')), + slice(PhysicalQuantity('150mm'), + PhysicalQuantity('350mm'))] + fslice = AF.FocusSlice(slices, PhysicalQuantity('10mm**-3'), mask) + fc = FieldContainer(numpy.array([fslice for xr in xrange(1000)])) + fc.seal() + from pyphant.core.KnowledgeManager import KnowledgeManager + km = KnowledgeManager.getInstance() + km.registerDataContainer(fc) + returnfc = km.getDataContainer(fc.id, use_cache=False) + assert returnfc.data[0].slices[0].start == fc.data[0].slices[0].start + + +if __name__ == "__main__": + import sys + if len(sys.argv) == 1: + unittest.main() + else: + suite = unittest.TestLoader().loadTestsFromTestCase( + eval(sys.argv[1:][0])) + unittest.TextTestRunner().run(suite) Modified: trunk/src/pyphant/pyphant/tests/TestH5FileHandler.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestH5FileHandler.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/pyphant/pyphant/tests/TestH5FileHandler.py 2009-09-28 10:18:07 UTC (rev 658) @@ -30,6 +30,8 @@ # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +from __future__ import with_statement + u"""Provides unittest classes for H5FileHandler. 
""" @@ -84,8 +86,9 @@ def testSaveLoad(self): handler = H5FH(self.fcFilename, 'w') - handler.saveDataContainer(self.fc) - fcLoaded = handler.loadDataContainer(self.fc.id) + with handler: + handler.saveDataContainer(self.fc) + fcLoaded = handler.loadDataContainer(self.fc.id) self.assertEqual(self.fc, fcLoaded) @@ -96,14 +99,16 @@ prefix = 'pyphantH5FileHandlerTest') os.close(osHandle) handler = H5FH(self.rofcFilename, 'w') - handler.saveDataContainer(self.fc) + with handler: + handler.saveDataContainer(self.fc) def tearDown(self): os.remove(self.rofcFilename) def testReadOnly(self): handler = H5FH(self.rofcFilename, 'r') - fcLoaded = handler.loadDataContainer(self.fc.id) + with handler: + fcLoaded = handler.loadDataContainer(self.fc.id) self.assertEqual(self.fc, fcLoaded) @@ -141,8 +146,9 @@ def testSaveLoad(self): handler = H5FH(self.scFilename, 'w') - handler.saveDataContainer(self.sc) - scLoaded = handler.loadDataContainer(self.sc.id) + with handler: + handler.saveDataContainer(self.sc) + scLoaded = handler.loadDataContainer(self.sc.id) self.assertEqual(self.sc, scLoaded) @@ -153,14 +159,16 @@ prefix = 'pyphantH5FileHandlerTest') os.close(osHandle) handler = H5FH(self.roscFilename, 'w') - handler.saveDataContainer(self.sc) + with handler: + handler.saveDataContainer(self.sc) def tearDown(self): os.remove(self.roscFilename) def testReadOnly(self): handler = H5FH(self.roscFilename, 'r') - scLoaded = handler.loadDataContainer(self.sc.id) + with handler: + scLoaded = handler.loadDataContainer(self.sc.id) self.assertEqual(self.sc, scLoaded) @@ -171,16 +179,18 @@ prefix = 'pyphantH5FileHandlerTest') os.close(osHandle) handler = H5FH(self.appscFilename, 'w') - handler.saveDataContainer(self.fc) + with handler: + handler.saveDataContainer(self.fc) def tearDown(self): os.remove(self.appscFilename) def testAppend(self): handler = H5FH(self.appscFilename, 'a') - handler.saveDataContainer(self.sc) - fcLoaded = handler.loadDataContainer(self.fc.id) - scLoaded = 
handler.loadDataContainer(self.sc.id) + with handler: + handler.saveDataContainer(self.sc) + fcLoaded = handler.loadDataContainer(self.fc.id) + scLoaded = handler.loadDataContainer(self.sc.id) self.assertEqual(self.fc, fcLoaded) self.assertEqual(self.sc, scLoaded) @@ -192,14 +202,16 @@ prefix = 'pyphantH5FileHandlerTest') os.close(osHandle) handler = H5FH(self.summFilename, 'w') - handler.saveDataContainer(self.sc) + with handler: + handler.saveDataContainer(self.sc) def tearDown(self): os.remove(self.summFilename) def testSummary(self): handler = H5FH(self.summFilename, 'r') - summarydict = handler.loadSummary() + with handler: + summarydict = handler.loadSummary() scsummary = summarydict[self.sc.id] fcsummary = summarydict[self.fc.id] self.assertEqual(scsummary['id'], self.sc.id) @@ -208,6 +220,10 @@ self.assertEqual(fcsummary['longname'], self.fc.longname) self.assertEqual(scsummary['shortname'], self.sc.shortname) self.assertEqual(fcsummary['shortname'], self.fc.shortname) + self.assertEqual(scsummary['creator'], self.sc.creator) + self.assertEqual(fcsummary['creator'], self.fc.creator) + self.assertEqual(scsummary['machine'], self.sc.machine) + self.assertEqual(fcsummary['machine'], self.fc.machine) self.assertEqual(scsummary['attributes'], self.sc.attributes) self.assertEqual(fcsummary['attributes'], self.fc.attributes) self.assertEqual(fcsummary['unit'], self.fc.unit) Modified: trunk/src/pyphant/pyphant/tests/TestKnowledgeManager.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestKnowledgeManager.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/pyphant/pyphant/tests/TestKnowledgeManager.py 2009-09-28 10:18:07 UTC (rev 658) @@ -43,7 +43,7 @@ pkg_resources.require("pyphant") from pyphant.core.KnowledgeManager import KnowledgeManager import pyphant.core.PyTablesPersister as ptp -from pyphant.core.FieldContainer import FieldContainer +from pyphant.core.DataContainer import FieldContainer import numpy 
as N import tables import urllib @@ -79,7 +79,7 @@ url = "http://" + host + remote_dir + "/knowledgemanager-http-test.h5" # Get remote file and load DataContainer filename, headers = urllib.urlretrieve(url) - h5 = tables.openFile(filename) + h5 = tables.openFile(filename, 'r') for g in h5.walkGroups("/results"): if (len(g._v_attrs.TITLE)>0) \ and (r"\Psi" in g._v_attrs.shortname): @@ -130,7 +130,19 @@ os.remove(filename) km.getDataContainer(dc_id) + def testCache(self): + km = KnowledgeManager.getInstance() + fcids = [] + for xr in xrange(20): + fc = FieldContainer(N.array([1, 2, xr])) + fc.seal() + km.registerDataContainer(fc) + fcids.append(fc.id) + for fcid in fcids: + for rep in xrange(10): + fc = km.getDataContainer(fcid) + if __name__ == "__main__": import sys logger = logging.getLogger('pyphant') Modified: trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py =================================================================== --- trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py 2009-09-28 10:18:07 UTC (rev 658) @@ -150,5 +150,17 @@ pylab.ion() pylab.show() +class ImageSaver(object): + name = 'Save Greyscale Image' + def __init__(self, fieldContainer, show=True): + self.fieldContainer = fieldContainer + self.show = show + #testing only: + print("Enter filename: ") + filename = raw_input() + if filename != "": + scipy.misc.imsave('/Users/aheld/CiSE/series/output/' + filename, + fieldContainer.data) + DataVisReg.getInstance().registerVisualizer(TYPE_IMAGE, ImageVisualizer) - +DataVisReg.getInstance().registerVisualizer(TYPE_IMAGE, ImageSaver) Copied: trunk/src/pyphant/pyphant/visualizers/KMVisualizer.py (from rev 657, trunk/src/workers/tools/tools/__init__.py) =================================================================== --- trunk/src/pyphant/pyphant/visualizers/KMVisualizer.py (rev 0) +++ trunk/src/pyphant/pyphant/visualizers/KMVisualizer.py 
2009-09-28 10:18:07 UTC (rev 658) @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2008, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +u""" +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + + +from pyphant.core.Connectors import (TYPE_IMAGE, TYPE_ARRAY) +from pyphant.wxgui2.DataVisReg import DataVisReg +from pyphant.core.KnowledgeManager import KnowledgeManager as KM +import webbrowser + + +class KMVisualizer(object): + name='Register @ KnowledgeManager' + def __init__(self, DataContainer, show=True): + self.DataContainer = DataContainer + self.show = show + km = KM.getInstance() + km.registerDataContainer(DataContainer) + dc_id = DataContainer.id + if km.isServerRunning(): + url = 'http://%s:%d/request_dc_details?dcid=%s' % (km._http_host, + km._http_port, + dc_id) + webbrowser.open_new_tab(url) + else: + print "ID of registered DC is: " + dc_id + + +DataVisReg.getInstance().registerVisualizer(TYPE_IMAGE, KMVisualizer) +DataVisReg.getInstance().registerVisualizer(TYPE_ARRAY, KMVisualizer) Modified: trunk/src/pyphant/pyphant/visualizers/__init__.py =================================================================== --- trunk/src/pyphant/pyphant/visualizers/__init__.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/pyphant/pyphant/visualizers/__init__.py 2009-09-28 10:18:07 UTC (rev 658) @@ -51,4 +51,4 @@ #for module in filter(lambda file: file[-3:]=='.py', files): # if not module == '__init__.py': # exec 'import ' + module[:-3] -import ImageVisualizer, Chart +import ImageVisualizer, Chart, KMVisualizer Modified: trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py =================================================================== --- trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py 2009-09-28 10:18:07 UTC (rev 658) @@ -67,6 +67,7 @@ import ConfigureFrame import platform from pyphant.core.KnowledgeManager import KnowledgeManager as KM +import webbrowser pltform = platform.system() class wxPyphantApplication(wx.PySimpleApp): @@ -210,6 +211,7 @@ 
self._fileMenu.Append( wx.ID_EXIT, "E&xit" ) self._fileMenu.Append( wx.ID_FILE1, "Import HDF5 or FMF from &URL" ) self._fileMenu.Append( wx.ID_FILE2, "&Import local HDF5 or FMF file") + self._fileMenu.Append( wx.ID_FILE3, "Start/stop &web interface") self._menuBar.Append( self._fileMenu, "&File" ) self._closeCompositeWorker = wx.Menu() self._closeCompositeWorker.Append(self.ID_CLOSE_COMPOSITE_WORKER, "&Close Composite Worker") @@ -226,6 +228,7 @@ self.Bind(wx.EVT_MENU, self.onCloseCompositeWorker, id=self.ID_CLOSE_COMPOSITE_WORKER) self.Bind(wx.EVT_MENU, self.onImportURL, id=wx.ID_FILE1) self.Bind(wx.EVT_MENU, self.onImportLocal, id=wx.ID_FILE2) + self.Bind(wx.EVT_MENU, self.onWebInterface, id=wx.ID_FILE3) def createUpdateMenu(self): updateMenu = wx.Menu() @@ -326,7 +329,33 @@ else: dlg.Destroy() + def onWebInterface(self, event): + km = KM.getInstance() + cpt = "Pyphant Web Interface" + msg = "" + if km.web_interface.disabled: + if not km.isServerRunning(): + try: + km.startServer("127.0.0.1", 8000, True) + msg += "Started web server @ 127.0.0.1:8000" + url = 'http://%s:%d/' % (km._http_host, + km._http_port) + webbrowser.open_new(url) + except: + msg += "Could not start web server @ 127.0.0.1:8000" + km.web_interface.disabled = True + else: + km.web_interface.disabled = False + else: + km.web_interface.disabled = True + msg += "Disabled web interface." + if km.isServerRunning(): + km.stopServer() + msg += "\nStopped web server." 
+ dlg = wx.MessageDialog(self, msg, cpt, wx.OK) + dlg.ShowModal() + import optparse def startWxPyphant(): Added: trunk/src/workers/ImageProcessing/ImageProcessing/AutoFocus.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/AutoFocus.py (rev 0) +++ trunk/src/workers/ImageProcessing/ImageProcessing/AutoFocus.py 2009-09-28 10:18:07 UTC (rev 658) @@ -0,0 +1,246 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2007, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +u""" +TODO +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$ + +from pyphant.core import Worker, Connectors,\ + Param, DataContainer +import ImageProcessing +import numpy, copy +from scipy import ndimage as ndi +from pyphant.quantities.PhysicalQuantities import (isPhysicalQuantity, + PhysicalQuantity) +from pyphant.core.DataContainer import FieldContainer + + +class Cube(object): + def __init__(self, slices): + self.slices = slices + + def _binary(self, other, bifunc1, bifunc2): + bislices = [] + for sli1, sli2 in zip(self.slices, other.slices): + bislice = slice(bifunc1(sli1.start, sli2.start), + bifunc2(sli1.stop, sli2.stop)) + if bislice.stop < bislice.start: + # Weird notation necessary for PhysicalQuantities! 
+ bislice = slice(0 * bislice.start, 0 * bislice.stop) + bislices.append(bislice) + return Cube(bislices) + + def __and__(self, other): + return self._binary(other, max, min) + + def __or__(self, other): + return self._binary(other, min, max) + + def __eq__(self, other): + return self.slices == other.slices + + def __sub__(self, other): + subslices = [] + for sli1, sli2 in zip(self.slices, other.slices): + sub = sli2.start + subslices.append(slice(sli1.start - sub, sli1.stop - sub)) + return Cube(subslices) + + def getSubCube(self, dimlist): + subslices = [] + for dim in xrange(len(self.slices)): + if dim in dimlist: + subslices.append(self.slices[dim]) + return Cube(subslices) + + def getVolume(self): + vol = 1 + for sli in self.slices: + vol *= (sli.stop - sli.start) + return vol + + def getEdgeLength(self, edgeIndex): + return self.slices[edgeIndex].stop - self.slices[edgeIndex].start + + def getCenter(self): + return tuple([(sli.start + sli.stop) / 2.0 for sli in self.slices]) + + +class FocusSlice(Cube): + def __init__(self, slices, focus, mask): + Cube.__init__(self, slices) + self.focus = focus + from types import StringTypes + from pyphant.core.KnowledgeManager import KnowledgeManager + if isinstance(mask, StringTypes): + km = KnowledgeManager.getInstance() + maskfc = km.getDataContainer(unicode(mask).encode('utf-8')) + self.mask = maskfc.data + self.maskEmd5 = unicode(mask).encode('utf-8') + else: + km = KnowledgeManager.getInstance() + maskfc = FieldContainer(mask, longname="FocusSliceMask") + maskfc.seal() + km.registerDataContainer(maskfc) + self.maskEmd5 = unicode(maskfc.id).encode('utf-8') + self.mask = mask + + def __str__(self): + retstr = "FocusSlice(slices=%s, focus=%s, mask=%s)" + return retstr % (self.slices, self.focus, self.mask) + + def __repr__(self): + retstr = "FocusSlice(%s, %s, '%s')" + return retstr % (self.slices.__repr__(), + self.focus.__repr__(), + self.maskEmd5) + + def __eq__(self, other): + eqflag = self.slices == other.slices + 
eqflag &= self.focus == other.focus + eqflag &= (self.mask == other.mask).all() + return eqflag + + +class ZTube(object): + def __init__(self, fslice, zvalue, ztol, boundRatio, featureRatio): + self.yxCube = Cube(fslice.slices) + self.maxFocus = fslice.focus + self.focusedFSlice = fslice + self.boundRatio = boundRatio + self.featureRatio = featureRatio + self.zCube = Cube([slice(zvalue - ztol, zvalue + ztol)]) + self.ztol = ztol + self.focusedZ = zvalue + + def match(self, fslice, zvalue): + vol = (self.yxCube & fslice).getVolume() + if not isPhysicalQuantity(vol): + vol = float(vol) + yxratio = vol / fslice.getVolume() + fszCube = Cube([slice(zvalue - self.ztol, zvalue + self.ztol)]) + zvol = (self.zCube & fszCube).getVolume() + # weird notation necessary for PhysicalQuantities + if yxratio >= self.boundRatio and zvol != 0 * zvol: + orCube = self.yxCube | fslice + self.yxCube = orCube + self.zCube = self.zCube | fszCube + if fslice.focus > self.maxFocus: + self.maxFocus = fslice.focus + self.focusedFSlice = fslice + self.focusedZ = zvalue + return True + return False + + def getFocusedInclusion(self): + """ + This method returns a tuple + (z, y, x, diameter, focus, zError, yError, xError, diameterError) + corresponding to the most focused feature in the ZTube + """ + #This is just a preliminary example of how to calculate the values... 
+ coordY, coordX = self.focusedFSlice.getCenter() + coordZ = self.focusedZ + edgeL0 = self.focusedFSlice.getEdgeLength(0) + edgeL1 = self.focusedFSlice.getEdgeLength(1) + cEZ, cEY, cEX = self.ztol, edgeL0 / 4.0, edgeL1 / 4.0 + diameter = (edgeL0 * edgeL0 + edgeL1 * edgeL1) ** .5 + diameterError = .1 * diameter + return (coordZ, coordY, coordX, diameter, self.focusedFSlice.focus, + cEZ, cEY, cEX, diameterError) + + +def autofocus(focusSC, boundRatio, featureRatio): + from pyphant.core.KnowledgeManager import KnowledgeManager + km = KnowledgeManager.getInstance() + ztubes = [] + ztol = focusSC.attributes[u'ztol'] + zunit = focusSC['z-value'].unit + for zNumValue, emd5 in zip(focusSC['z-value'].data, focusSC['emd5'].data): + zvalue = zNumValue * zunit + focusFC = km.getDataContainer(unicode(emd5).encode('utf-8')) + for fslice in focusFC.data: + if fslice != 0: + matched = False + for ztube in ztubes: + matched = ztube.match(fslice, zvalue) + if matched: + break + if not matched: + ztubes.append(ZTube(fslice, zvalue, ztol, + boundRatio, featureRatio)) + if ztubes == []: + return [] + fInclusions = [ztube.getFocusedInclusion() for ztube in ztubes] + longnames = ["z-value", "y-value", "x-value", "diameter", "focus"] + shortnames = ["z", "y", "x", "d", "f"] + fIColumns = zip(*fInclusions) + pqdata = [fIColumns[index] for index in xrange(5)] + pqerrors = [fIColumns[index] for index in xrange(5, 9)] + units = [PhysicalQuantity(1, fInclusions[0][index].unit) \ + for index in xrange(5)] + data = [numpy.array([spqd.inUnitsOf(pqunit.unit).value for spqd in pqd]) \ + for pqd, pqunit in zip(pqdata, units)] + errors = [numpy.array([serr.inUnitsOf(pqunit.unit).value for serr in err]) \ + for err, pqunit in zip(pqerrors, units[:4])] + errors.append(None) + return [DataContainer.FieldContainer(dat, unit, err, + longname=ln, shortname=sn) \ + for dat, unit, err, ln, sn in zip(data, units, errors, + longnames, shortnames)] + + +class AutoFocus(Worker.Worker): + API = 2 + VERSION = 1 + 
REVISION = "$Revision$"[11:-1] + name = "AutoFocus" + _params = [("boundRatio", "Minimal overlap ratio in percent (bounding box)", + 50, None), + ("featureRatio", + "Not implemented", 75, None)] + _sockets = [("focusSC", Connectors.TYPE_ARRAY)] + + @Worker.plug(Connectors.TYPE_ARRAY) + def AutoFocusWorker(self, focusSC, subscriber=0): + columns = autofocus(focusSC, + self.paramBoundRatio.value / 100.0, + self.paramFeatureRatio.value / 100.0) + longname = "AutoFocus" + result = DataContainer.SampleContainer(columns=columns, + longname=longname) + result.seal() + return result Modified: trunk/src/workers/ImageProcessing/ImageProcessing/FitBackground.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/FitBackground.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/workers/ImageProcessing/ImageProcessing/FitBackground.py 2009-09-28 10:18:07 UTC (rev 658) @@ -92,23 +92,23 @@ @Worker.plug(Connectors.TYPE_IMAGE) def fit_background(self, image, subscriber=0): - poldegree = self.paramPoldegree.value - swidth = self.paramSwidth.value - sheight = self.paramSheight.value - threshold = self.paramThreshold.value - mediansize = self.paramMediansize.value - medianruns = self.paramMedianruns.value - darksize = self.paramDarksize.value - darkruns = self.paramDarkruns.value - brightsize = self.paramBrightsize.value - brightruns = self.paramBrightruns.value + poldegree = int(self.paramPoldegree.value) + swidth = int(self.paramSwidth.value) + sheight = int(self.paramSheight.value) + threshold = int(self.paramThreshold.value) + mediansize = int(self.paramMediansize.value) + medianruns = int(self.paramMedianruns.value) + darksize = int(self.paramDarksize.value) + darkruns = int(self.paramDarkruns.value) + brightsize = int(self.paramBrightsize.value) + brightruns = int(self.paramBrightruns.value) dopreview = self.paramDopreview.value assert image.data.ndim in [2, 3] if image.data.ndim == 2: pile = [image.data] else: 
pile = image.data - #Median + #Median: for run in xrange(medianruns): pile = [ndimage.median_filter(data, size=mediansize) for data in pile] Modified: trunk/src/workers/ImageProcessing/ImageProcessing/Gradient.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/Gradient.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/workers/ImageProcessing/ImageProcessing/Gradient.py 2009-09-28 10:18:07 UTC (rev 658) @@ -38,17 +38,13 @@ __version__ = "$Revision$" # $Source$ -from pyphant.core import Worker, Connectors,\ - Param, DataContainer +from pyphant.core import Worker, Connectors, Param, DataContainer import ImageProcessing import numpy, copy -from ImageProcessing.NDImageWorker import pile def gradient(data): - res = numpy.sqrt(sum( - numpy.square(numpy.array(numpy.gradient(data))) - )) - return (res * 255.0).astype(int) / 361 + res = numpy.sqrt(sum(numpy.square(numpy.array(numpy.gradient(data))))) + return (res * 255.0).astype(int) / 361 class Gradient(Worker.Worker): @@ -60,7 +56,9 @@ @Worker.plug(Connectors.TYPE_IMAGE) def gradientWorker(self, image, subscriber=0): - #TODO: Check whether all dimensions have same unit + for dim in image.dimensions: + assert dim.unit == image.dimensions[0].unit, ("Other cases not " + "implemented!") newdata = gradient(image.data) longname = "Gradient" result = DataContainer.FieldContainer( Modified: trunk/src/workers/ImageProcessing/ImageProcessing/MeasureFocus.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/MeasureFocus.py 2009-09-01 21:15:42 UTC (rev 657) +++ trunk/src/workers/ImageProcessing/ImageProcessing/MeasureFocus.py 2009-09-28 10:18:07 UTC (rev 658) @@ -43,8 +43,64 @@ import ImageProcessing import numpy, copy from scipy import ndimage -from ImageProcessing.NDImageWorker import pile +from ImageProcessing.AutoFocus import FocusSlice +def sliceAndMeasure(image, labels, grow, 
human_output): + data = image.data + dy = (image.dimensions[0].data[1] - image.dimensions[0].data[0]) + dy *= image.dimensions[0].unit + dx = (image.dimensions[1].data[1] - image.dimensions[1].data[0]) + dx *= image.dimensions[1].unit + unit = image.unit / (dy * dx) + slices = ndimage.find_objects(labels.data) + if human_output: + resdata = numpy.zeros(data.shape) + else: + resdata = numpy.zeros(len(slices), dtype=object) + label = 0 + for sl in slices: + label += 1 + if sl[0].stop - sl[0].start >= 3 and sl[1].stop - sl[1].start >= 3: + start = [sl[0].start - grow, sl[1].start - grow] + stop = [sl[0].stop + grow, sl[1].stop + grow] + if start[0] < 0: start[0] = 0 + if start[1] < 0: start[1] = 0 + bigsl = (slice(start[0], stop[0]), slice(start[1], stop[1])) + focus = numpy.sum(data[bigsl]) / data[bigsl].size + if human_output: + resdata[sl] = numpy.where(labels.data[sl] == label, + ... [truncated message content] |
From: <zk...@us...> - 2009-11-23 13:49:30
|
Revision: 660 http://pyphant.svn.sourceforge.net/pyphant/?rev=660&view=rev Author: zklaus Date: 2009-11-23 13:49:19 +0000 (Mon, 23 Nov 2009) Log Message: ----------- Merge branch 'master' into svn-trunk Modified Paths: -------------- trunk/src/pyphant/pyphant/core/DataContainer.py trunk/src/pyphant/pyphant/core/FieldContainer.py trunk/src/pyphant/pyphant/core/H5FileHandler.py trunk/src/pyphant/pyphant/core/Helpers.py trunk/src/pyphant/pyphant/core/KnowledgeManager.py trunk/src/pyphant/pyphant/core/PyTablesPersister.py trunk/src/pyphant/pyphant/core/WebInterface.py trunk/src/pyphant/pyphant/quantities/PhysicalQuantities.py trunk/src/pyphant/pyphant/tests/TestAutoFocus.py trunk/src/pyphant/pyphant/tests/TestH5FileHandler.py trunk/src/pyphant/pyphant/tests/TestKnowledgeManager.py trunk/src/pyphant/pyphant/tests/TestPyTablesPersister.py trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py trunk/src/workers/ImageProcessing/ImageProcessing/AutoFocus.py trunk/src/workers/ImageProcessing/ImageProcessing/MeasureFocus.py trunk/src/workers/ImageProcessing/ImageProcessing/NDImageWorker.py trunk/src/workers/ImageProcessing/ImageProcessing/Watershed.py trunk/src/workers/tools/tools/Emd5Src.py Added Paths: ----------- trunk/src/pyphant/pyphant/core/SQLiteWrapper.py trunk/src/pyphant/pyphant/tests/TestSQLiteWrapper.py Modified: trunk/src/pyphant/pyphant/core/DataContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/DataContainer.py 2009-09-28 10:33:55 UTC (rev 659) +++ trunk/src/pyphant/pyphant/core/DataContainer.py 2009-11-23 13:49:19 UTC (rev 660) @@ -229,6 +229,10 @@ return u"%s %s" % (self.longname, self.shortname) label = property(_getLabel) + def _getRawDataBytes(self): + return [column.rawDataBytes for column in self.columns] + rawDataBytes = property(_getRawDataBytes) + def generateHash(self, m=None): if m == None: m = hashlib.md5() Modified: trunk/src/pyphant/pyphant/core/FieldContainer.py 
=================================================================== --- trunk/src/pyphant/pyphant/core/FieldContainer.py 2009-09-28 10:33:55 UTC (rev 659) +++ trunk/src/pyphant/pyphant/core/FieldContainer.py 2009-11-23 13:49:19 UTC (rev 660) @@ -61,6 +61,7 @@ import os, platform, datetime, socket, urlparse from pyphant.quantities.PhysicalQuantities import (isPhysicalQuantity, PhysicalQuantity,_prefixes) from DataContainer import DataContainer, enc, _logger +from types import NoneType #Default variables of indices INDEX_NAMES=[u'i', u'j', u'k', u'l', u'm', u'n'] @@ -74,6 +75,7 @@ hash = hashlib.md5().hexdigest() shortname=u"i" longname=u"index" + rawDataBytes = 0 def seal(self, id=None): pass @@ -260,6 +262,11 @@ return label.replace('1.0 ',r'')#.replace('mu',u'\\textmu{}') shortlabel=property(_getShortLabel) + def _getRawDataBytes(self): + return self.data.nbytes \ + + sum([dim.rawDataBytes for dim in self.dimensions]) + rawDataBytes = property(_getRawDataBytes) + def __deepcopy__(self, memo): self.lock.acquire() data=copy.deepcopy(self.data, memo) @@ -353,7 +360,7 @@ def __eq__(self, other, rtol=1e-5, atol=1e-8): if type(self) != type(other): - if type(other) != IndexMarker: + if type(other) != IndexMarker and type(other) != NoneType: _logger.debug('Cannot compare objects with different type (%s and %s).' % (type(self),type(other))) return False if not (self.typeString == other.typeString): Modified: trunk/src/pyphant/pyphant/core/H5FileHandler.py =================================================================== --- trunk/src/pyphant/pyphant/core/H5FileHandler.py 2009-09-28 10:33:55 UTC (rev 659) +++ trunk/src/pyphant/pyphant/core/H5FileHandler.py 2009-11-23 13:49:19 UTC (rev 660) @@ -51,9 +51,9 @@ """ This class is used to handle IO operations on HDF5 files. """ - def __init__(self, filename, mode = 'a'): + def __init__(self, filename, mode='a'): """ - Opens a HDF5 file. + Opens an HDF5 file. 
filename -- path to the file that should be opened mode -- mode in which file is opened. Possible values: 'r', 'w', 'a' meaning 'read only', 'overwrite' and 'append'. @@ -98,6 +98,19 @@ uriType = uriType.encode('utf-8') return (resNode, uriType) + def isIndexMarker(self, dcId): + """ + returns True iff the underlying HDF5 file contains dcId and + dcId belongs to an IndexMarker + """ + resNode, uriType = self.getNodeAndTypeFromId(dcId) + if uriType == u'field': + try: + resNode._g_checkHasChild('dimensions') + except tables.NoSuchNodeError: + return True + return False + def loadDataContainer(self, dcId): """ Loads a DataContainer from the HDF5 file and returns it as a @@ -106,13 +119,9 @@ """ resNode, uriType = self.getNodeAndTypeFromId(dcId) if uriType == 'field': - _logger.info("Trying to load field data from node %s..." % resNode) result = self.loadField(resNode) - _logger.info("...successfully loaded.") elif uriType == 'sample': - _logger.info("Trying to load sample data from node %s..." % resNode) result = self.loadSample(resNode) - _logger.info("...successfully loaded.") else: raise TypeError, "Unknown result uriType in <%s>" % resNode._v_title return result @@ -137,59 +146,52 @@ """ Extracts meta data about a given DataContainer and returns it as a dictionary. - dcId -- emd5 of the DC to summarize. If dcId == None, a dictionary - that maps emd5s to summaries is returned. + dcId -- emd5 of the DC to summarize. If the emd5 belongs to an + IndexMarker object, u'IndexMarker' is returned. + If dcId == None, a dictionary that maps emd5s to summaries + is returned, where IndexMarker objects are ignored. 
""" if dcId == None: summary = {} for group in self.handle.walkGroups(where = "/results"): currDcId = group._v_attrs.TITLE if len(currDcId) > 0: - summary[currDcId] = self.loadSummary(currDcId) + tmp = self.loadSummary(currDcId) + if tmp != u'IndexMarker': + summary[currDcId] = tmp + elif self.isIndexMarker(dcId): + return u'IndexMarker' else: + from pyphant.core.Helpers import (utf82uc, emd52dict) summary = {} summary['id'] = dcId resNode, uriType = self.getNodeAndTypeFromId(dcId) - summary['longname'] = unicode(self.handle.getNodeAttr(resNode, - "longname"), 'utf-8') - summary['shortname'] = unicode(self.handle.getNodeAttr(resNode, - "shortname"), 'utf-8') - emd5_split = dcId.split('/') + summary['longname'] = utf82uc(self.handle.getNodeAttr(resNode, + "longname")) + summary['shortname'] = utf82uc(self.handle.getNodeAttr(resNode, + "shortname")) + summary.update(emd52dict(dcId)) try: - summary['machine'] = unicode(self.handle.getNodeAttr(resNode, - "machine"), 'utf-8') - summary['creator'] = unicode(self.handle.getNodeAttr(resNode, - "creator"), 'utf-8') + summary['machine'] = utf82uc(self.handle.getNodeAttr(resNode, + "machine")) + summary['creator'] = utf82uc(self.handle.getNodeAttr(resNode, + "creator")) except: - summary['machine'] = unicode(emd5_split[2], 'utf-8') - summary['creator'] = unicode(emd5_split[3], 'utf-8') - summary['date'] = unicode(emd5_split[4], 'utf-8') - summary['hash'] = emd5_split[5].split('.')[0] - summary['type'] = unicode(emd5_split[5].split('.')[1], 'utf-8') + pass # machine, creator set by emd52dict(dcId) before attributes = {} if uriType == 'field': for key in resNode.data._v_attrs._v_attrnamesuser: attributes[key]=self.handle.getNodeAttr(resNode.data, key) - unit = eval(unicode(self.handle.getNodeAttr(resNode, "unit"), - 'utf-8')) - try: - if isinstance(unit, (str, unicode)): - unit = unit.replace('^', '**') - if isinstance(unit, unicode): - unit = unit.encode('utf-8') - summary['unit'] = PhysicalQuantity(unit) - except: - try: - 
summary['unit'] = PhysicalQuantity("1" + unit) - except: - summary['unit'] = unit - try: - dimTable = resNode.dimensions - dimensions = [self.loadSummary(row['id']) + unit = eval(utf82uc(self.handle.getNodeAttr(resNode, "unit"))) + summary['unit'] = unit + dimTable = resNode.dimensions + def filterIndexMarker(emd5): + if self.isIndexMarker(emd5): + return u'IndexMarker' + else: + return emd5 + dimensions = [filterIndexMarker(row['id']) \ for row in dimTable.iterrows()] - except tables.NoSuchNodeError: - dimensions = u'INDEX' - summary['type'] = u'index' summary['dimensions'] = dimensions elif uriType == 'sample': for key in resNode._v_attrs._v_attrnamesuser: @@ -199,7 +201,7 @@ for resId in self.handle.getNodeAttr(resNode, "columns"): nodename = "/results/" + resId columnId = self.handle.getNodeAttr(nodename, "TITLE") - columns.append(self.loadSummary(columnId)) + columns.append(columnId) summary['columns'] = columns summary['attributes'] = attributes return summary Modified: trunk/src/pyphant/pyphant/core/Helpers.py =================================================================== --- trunk/src/pyphant/pyphant/core/Helpers.py 2009-09-28 10:33:55 UTC (rev 659) +++ trunk/src/pyphant/pyphant/core/Helpers.py 2009-11-23 13:49:19 UTC (rev 660) @@ -116,3 +116,18 @@ if isinstance(stype, ListType): return map(convert, stype) return convert(stype) + +def emd52dict(emd5): + """ + returns a dictionary with keys + ('machine', 'creator', 'date', 'hash', 'type') + """ + emd5 = utf82uc(emd5) + emd5_split = emd5.split('/') + retdict = {} + retdict['machine'] = emd5_split[2] + retdict['creator'] = emd5_split[3] + retdict['date'] = emd5_split[4] + retdict['hash'] = emd5_split[5].split('.')[0] + retdict['type'] = emd5_split[5].split('.')[1] + return retdict Modified: trunk/src/pyphant/pyphant/core/KnowledgeManager.py =================================================================== --- trunk/src/pyphant/pyphant/core/KnowledgeManager.py 2009-09-28 10:33:55 UTC (rev 659) +++ 
trunk/src/pyphant/pyphant/core/KnowledgeManager.py 2009-11-23 13:49:19 UTC (rev 660) @@ -60,17 +60,18 @@ KMHTMLParser, ThreadedHTTPServer) from fmfile import FMFLoader +from pyphant.core.SQLiteWrapper import (SQLiteWrapper, AnyValue) WAITING_SECONDS_HTTP_SERVER_STOP = 5 HTTP_REQUEST_DC_URL_PATH = "/request_dc_url" HTTP_REQUEST_KM_ID_PATH = "/request_km_id" HTTP_REQUEST_DC_DETAILS_PATH = "/request_dc_details?dcid=" -# Maximum number of DCs to store in cache: -CACHE_SIZE = 10 -# Timeout for cached DCs in seconds: -CACHE_TIMEOUT = 3600 -# Number of hits a DC has to have at least in order to be cached: -CACHE_THRESHHOLD = 2 +HTTP_REQUEST_REGISTER_KM = "/request_register_km" +HTTP_REQUEST_SEARCH = "/request_search" +# Limit for sum(DC.rawDataBytes) for DC in cache: +CACHE_MAX_SIZE = 256 * 1024 * 1024 +# Limit for number of stored DCs in cache: +CACHE_MAX_NUMBER = 100 KM_PATH = '/KMstorage/' REHDF5 = re.compile(r'..*\.h5$|..*\.hdf$|..*\.hdf5$') REFMF = re.compile(r'..*\.fmf$') @@ -98,18 +99,23 @@ os.mkdir(makedir) return path -def getFilenameFromDcId(dcId): +def getFilenameFromDcId(dcId, temporary=False): """ Returns a unique filename for the given emd5. """ emd5list = urlparse(dcId + '.h5')[2][2:].split('/') emd5path = '' - for p in emd5list: + for p in emd5list[:-2]: emd5path += (p + '/') - emd5path = emd5path[:-1] + emd5path += emd5list[-2][:10] + '/' + emd5list[-2][11:]\ + + '.' 
+ emd5list[-1] directory = os.path.dirname(emd5path) filename = os.path.basename(emd5path) - return getPyphantPath(KM_PATH + 'by_emd5/' + directory) + filename + if temporary: + subdir = 'tmp/by_emd5/' + else: + subdir = 'by_emd5/' + return getPyphantPath(KM_PATH + subdir + directory) + filename class KnowledgeManagerException(Exception): @@ -138,6 +144,24 @@ return self._parent_excep +class CachedDC(object): + def __init__(self, dc_ref): + self.id = dc_ref.id + self.ref = dc_ref + self.size = dc_ref.rawDataBytes + + def __eq__(self, other): + return self.id == other.id + + +class TestCachedDC(object): + def __init__(self, dc_id): + self.id = dc_id + + def __eq__(self, other): + return self.id == other.id + + class KnowledgeManager(Singleton): """ Knowledge Manager for Pyphant @@ -174,115 +198,74 @@ """ def __init__(self): """ - Sets the unique id for the KM instance and restores all HDF5 files from - the .pyphant directory. + Sets the unique id for the KM instance, sets up the DataBase if + it has not been initialized yet and clears the tmp dir. """ super(KnowledgeManager, self).__init__() self._logger = logging.getLogger("pyphant") - self._storage = {} - self._cache = {} + self._cache = [] + self._cache_size = 0 self.H5FileHandlers = {} self._remoteKMs = {} # key:id, value:url self._server = None self._server_id = uuid1() - self.restoreKnowledge() self.web_interface = WebInterface(self, True) + self.dbase = getPyphantPath('/sqlite3/') + "km_meta.sqlite3" + self.any_value = AnyValue() + with SQLiteWrapper(self.dbase) as wrapper: + wrapper.setup_dbase() + tmpdir = getPyphantPath(KM_PATH + 'tmp/') + if os.path.isdir(tmpdir): + from shutil import rmtree + try: + rmtree(tmpdir) + except OSError: + print "Could not delete '%s'." % (tmpdir, ) - def __del__(self): + def tearDown(self): """ - Stops the HTTP server and closes all open files. 
+ Stops the HTTP server """ if self.isServerRunning(): self.stopServer() - def registerH5(self, filename, mode = 'a', registerContents = True): + def hasDataContainer(self, dcid): """ - Adds the given HDF5 file to the pool and handles all - further IO operations on the given file in a save way. If you want the - knowledge to be stored permanently, use registerURL. - Possible usage: Register a file that does not exist (setting - registerContents to False). It then is created. - Get its H5FileHandler using the getH5FileHandler method. - Use this FileHandler to write some Data to the file. - Use the refreshH5 method to import the new contents into the - KnowledgeManager. - filename -- path to the HDF5 file to be registered. - mode -- see H5FileHandler - registerContents -- whether to register contents of the file as well. + Returns whether the given DC is stored locally. + Never use this method in a 'with SQLiteWrapper(...) as wrapper' + statement! Use wrapper.has_entry(dcid) instead if you already + have a wrapper at your hands or you may end up in a sqlite3 locking + loop. """ - if self.H5FileHandlers.has_key(filename): - raise KnowledgeManagerException("'%s' has already been registered." - % filename) - self.H5FileHandlers[filename] = H5FileHandler(filename, mode) - if registerContents: - self.refreshH5(filename) + with SQLiteWrapper(self.dbase) as wrapper: + has_entry = wrapper.has_entry(dcid) + return has_entry - def getH5FileHandler(self, filename): + def getH5FileHandler(self, filename, mode='r'): """ - Returns a H5FileHandler for the given filename to perform IO - operations on the file in a save way. The file has - to be registered first using registerH5. - As soon as you are done with your IO operations, use refreshH5 - in order to update the changes. + Returns an H5FileHandler for the given filename to perform IO + operations on the file in a save way. 
filename -- path to the HDF5 file + mode -- see H5FileHandler """ - if self.H5FileHandlers.has_key(filename): - return self.H5FileHandlers[filename] - else: - raise KnowledgeManagerException("'%s' has not been registered.") + return H5FileHandler(filename, mode) - def refreshH5(self, filename): + def registerH5(self, filename, temporary=False): """ - Refreshes the contents of the given file. The file has to be - registered first using registerH5. If a DC emd5 is found that is - already known to the KnownledgeManager, it is not updated, following - the principle that emd5s are unique and DCs that have been given a emd5 - should not me modified any more in any way. + Adds the given file to the knowledge pool. If you want the data to + be copied to the .pyphant directory, use registerURL() instead. filename -- path to the HDF5 file + temporary -- flag that marks data to be deleted upon next + instantiation of a KM Singleton """ h5fh = self.getH5FileHandler(filename) with h5fh: summaryDict = h5fh.loadSummary() - for dcId, summary in summaryDict.items(): - if not self._storage.has_key(dcId): - self._storage[dcId] = {'lasthit':None, - 'hitcount':0, - 'filehandler':h5fh, - 'summary':summary} + with SQLiteWrapper(self.dbase) as wrapper: + for dcId, summary in summaryDict.items(): + if not wrapper.has_entry(dcId): + wrapper.set_entry(summary, filename, temporary) - def restoreKnowledge(self): - """ - Restores knowledge from pyphant path. - """ - def walkfiles(arg, dirname, fnames): - for fname in fnames: - if REHDF5.match(fname.lower()) != None: - if dirname.endswith('/'): - dirname = dirname[:-1] - filename = dirname + '/' + fname - try: - self.registerH5(filename) - except Exception: - self._logger.warn("Could not import '%s'.", filename) - os.path.walk(getPyphantPath(KM_PATH), walkfiles, None) - - def getSummary(self, dcId = None): - """ - Behaves like H5FileHandler.loadSummary(dcId) except that for - dcId == None all DataContainers that are stored locally are browsed. 
- """ - if dcId == None: - summary = {} - for emd5, dcInfo in self._storage.iteritems(): - summary[emd5] = dcInfo['summary'] - else: - if not self._storage.has_key(dcId): - raise KnowledgeManagerException("DC with ID '%s' is unknown."\ - % (dcId, )) - dcInfo = self._storage[dcId] - summary = dcInfo['summary'] - return summary - def _getServerURL(self): """ Returns the URL of the HTTP server. @@ -409,17 +392,22 @@ % (km_url, ), excep) self._remoteKMs[km_id] = km_url - def registerURL(self, url): + def registerURL(self, url, temporary=False): """ - Registers an HDF5 or FMF file downloadable from given URL and store it - permanently in the .pyphant directory. The content of the file is made + Registers an HDF5 or FMF file downloadable from given URL and stores it + in the .pyphant directory. The content of the file is made available to the KnowledgeManager. HTTP redirects are resolved. The filetype is determined by the extension. url -- URL of the HDF5 or FMF file + temporary -- set to True in order to mark the data to be deleted upon + next instantiation of a KM singleton """ parsed = urlparse(url) - filename = KM_PATH + 'registered/' + parsed[1] + '/' + tmp_extension = '' + if temporary: + tmp_extension = 'tmp/' + filename = KM_PATH + tmp_extension + 'registered/' + parsed[1] + '/' filename += os.path.basename(parsed[2]) directory = os.path.dirname(filename) filename = getPyphantPath(directory) + os.path.basename(filename) @@ -445,43 +433,47 @@ savedto, headers = urllib.urlretrieve(url, filename) self._logger.info("Header information: %s", (str(headers), )) if REFMF.match(filename.lower()) != None: - self.registerFMF(filename) + self.registerFMF(filename, temporary) elif REHDF5.match(filename.lower()) != None: - self.registerH5(filename) + self.registerH5(filename, temporary) else: raise KnowledgeManagerException('Filetype unknown: %s' % (filename, )) - def registerDataContainer(self, dc): + def registerDataContainer(self, dc, temporary=False): """ Registers a 
DataContainer located in memory using a given - reference and store it permanently. + reference and stores it in the pyphant directory. The DataContainer must have an .id attribute, which could be generated by the datacontainer.seal() method. If the DCs emd5 is already known to the KnowledgeManager, the DC is not registered again since emd5s are unique. dc -- reference to the DataContainer object + temporary -- dc is stored only until another KM singleton is + created. Set this flag to True e.g. for unit tests + or whenever you do not want to produce garbage on + your hard drive. """ if dc.id == None: raise KnowledgeManagerException("Invalid id for DataContainer '"\ + dc.longname + "'") - if not self._storage.has_key(dc.id): - filename = getFilenameFromDcId(dc.id) - self.registerH5(filename, 'w', False) - handler = self.getH5FileHandler(filename) + if not self.hasDataContainer(dc.id): + filename = getFilenameFromDcId(dc.id, temporary) + handler = self.getH5FileHandler(filename, 'w') with handler: handler.saveDataContainer(dc) - self.refreshH5(filename) + self.registerH5(filename, temporary) - def registerFMF(self, filename): + def registerFMF(self, filename, temporary=False): """ Extracts a SampleContainer from a given FMF file and stores it permanently. The emd5 of the SampleContainer that has been generated is returned. filename -- path to the FMF file + temporary -- see registerDataContainer """ sc = FMFLoader.loadFMFFromFile(filename) - self.registerDataContainer(sc) + self.registerDataContainer(sc, temporary) return sc.id def _getDCURLFromRemoteKMs(self, query_dict): @@ -560,7 +552,7 @@ """ assert self.isServerRunning(), "Server is not running." dc_id = query_dict['dcid'] - if self._storage.has_key(dc_id): + if self.hasDataContainer(dc_id): dc = self.getDataContainer(dc_id, True, False) # Wrap data container in temporary HDF5 file osFileId, filename = tempfile.mkstemp(suffix = '.h5', @@ -579,6 +571,41 @@ "URL for DC ID '%s' not found." 
% (dc_id, ), excep) return dc_url + def getDCFromCache(self, dc_id, filename): + """ + Returns a DC instance from cache or local storage. + Also puts DC to cache if reasonable. + fc_id: emd5 to look for in cache + filename: alternative source if dc_id not present in cache + """ + try: + index = self._cache.index(TestCachedDC(dc_id)) + cached = self._cache.pop(index) + self._cache.append(cached) + return cached.ref + except ValueError: + with self.getH5FileHandler(filename) as handler: + dc = handler.loadDataContainer(dc_id) + self._attemptToCacheDC(dc) + return dc + + def _attemptToCacheDC(self, dc): + cache_item = CachedDC(dc) + if cache_item.size > CACHE_MAX_SIZE: + return + number_fits = len(self._cache) < CACHE_MAX_NUMBER + self._cache.reverse() + if not number_fits: + self._cache_size -= self._cache.pop().size + desired_size = CACHE_MAX_SIZE - cache_item.size + not_size_fits = self._cache_size > desired_size + while not_size_fits: + self._cache_size -= self._cache.pop().size + not_size_fits = self._cache_size > desired_size + self._cache.reverse() + self._cache.append(cache_item) + self._cache_size += cache_item.size + def getDataContainer(self, dc_id, use_cache = True, try_remote = True): """ Returns DataContainer matching the given id. 
@@ -587,55 +614,18 @@ lookups (default: True) try_remote -- Try to get DC from remote KMs (default: True) """ - dc = None - found_in_cache = False - islocal = self._storage.has_key(dc_id) - if islocal: - dcinfo = self._storage[dc_id] - now = time.time() - if dcinfo['lasthit'] != None: - if (now - dcinfo['lasthit']) >= CACHE_TIMEOUT: - dcinfo['hitcount'] = 0 - dcinfo['lasthit'] = now - dcinfo['hitcount'] += 1 + filename = None + with SQLiteWrapper(self.dbase) as wrapper: + try: + filename = wrapper[dc_id]['storage'] + except KeyError: + pass + if filename != None: if use_cache: - if self._cache.has_key(dc_id): - dc = self._cache[dc_id] - found_in_cache = True - self._logger.debug("DC with ID '%s' found in cache.", dc_id) - else: - self._logger.debug("DC with ID '%s' not found in cache.", - dc_id) - if not found_in_cache: - try: - handler = dcinfo['filehandler'] - with handler: - dc = handler.loadDataContainer(dc_id) - except Exception, excep: - raise KnowledgeManagerException("DC ID '%s' known, but " - "cannot be read" - "." 
% (dc_id, ), excep) - if use_cache and dcinfo['hitcount'] >= CACHE_THRESHHOLD: - docache = False - if len(self._cache) >= CACHE_SIZE: - minhitcount = sys.maxint - for cachedid in self._cache.keys(): - cacheddcinfo = self._storage[cachedid] - if (now - cacheddcinfo['lasthit']) >= CACHE_TIMEOUT: - cacheddcinfo['hitcount'] = 0 - self._cache.pop(cachedid) - docache = True - break - elif cacheddcinfo['hitcount'] < minhitcount: - minhitcount = cacheddcinfo['hitcount'] - if docache == False \ - and dcinfo['hitcount'] > minhitcount: - docache == True - else: - docache = True - if docache: - self._cache[dc_id] = dc - self._logger.debug("Cached DC with ID '%s'", dc_id) + return self.getDCFromCache(dc_id, filename) + with self.getH5FileHandler(filename) as handler: + dc = handler.loadDataContainer(dc_id) + return dc elif try_remote: dc_url = self._getDCURLFromRemoteKMs({'dcid':dc_id, 'lastkmidindex':-1}) @@ -645,14 +635,48 @@ filename = getFilenameFromDcId(dc_id) urllib.urlretrieve(dc_url, filename) self.registerH5(filename) - with self.H5FileHandlers[filename] as handler: + with self.getH5FileHandler(filename) as handler: dc = handler.loadDataContainer(dc_id) + return dc else: raise KnowledgeManagerException("DC ID '%s' is unknown." % (dc_id, )) - return dc + def getEmd5List(self): + """ + returns a list with all locally known DataContainer ids. + """ + with SQLiteWrapper(self.dbase) as wrapper: + return wrapper.get_emd5_list() + def search(self, result_keys, search_dict={}, order_by=None, + order_asc=True, limit=-1, offset=0, distinct=False): + """ + See SQLiteWrapper.get_andsearch_result() + """ + with SQLiteWrapper(self.dbase) as wrapper: + return wrapper.get_andsearch_result( + result_keys, search_dict, order_by, order_asc, + limit, offset, distinct) + + def getSummary(self, dc_id): + """ + This method returns a dictionary with meta information about + the given DC. 
+ """ + with SQLiteWrapper(self.dbase) as wrapper: + # TODO: usage of rowwrapper is not optimal in performance + rowwrapper = wrapper[dc_id] + keys = list(SQLiteWrapper.all_keys) + if dc_id.endswith('field'): + keys.remove('columns') + elif dc_id.endswith('sample'): + keys.remove('unit') + keys.remove('dimensions') + summary = dict([(key, rowwrapper[key]) for key in keys]) + return summary + + class KMHTTPRequestHandler(SimpleHTTPRequestHandler): """ Helper class for KnowledgeManager that handles HTTP requests. @@ -691,6 +715,8 @@ httpanswer = self._do_POST_request_dc_url() elif self.path == HTTP_REQUEST_KM_ID_PATH: httpanswer = self._do_POST_request_km_id() + elif self.path == HTTP_REQUEST_REGISTER_KM: + httpanswer = self._do_POST_request_register_km() else: code = 400 message = "Unknown request path '%s'." % (self.path, ) @@ -753,6 +779,20 @@ httpanswer = HTTPAnswer(400, "Cannot interpret query.") return httpanswer + def _do_POST_request_register_km(self): + length = int( self.headers['content-length'] ) + query = self.rfile.read(length) + dict = cgi.parse_qs(query) + km = KMHTTPRequestHandler._km + host = dict['remote_km_host'][0] + port = int(dict['remote_km_port'][0]) + try: + km.registerKnowledgeManager(host, port, False) + except KnowledgeManagerException: + emsg = "Host '%s:%d' is not a KnowledgeManager." 
+ return HTTPAnswer(400, emsg % (host, port)) + return km.web_interface.get_frontpage("/") + def do_GET(self): """ Returns a requested HDF5 from temporary directory or the web frontend @@ -765,6 +805,8 @@ km.web_interface.get_frontpage(self.path).sendTo(self) elif self.path.startswith(HTTP_REQUEST_DC_DETAILS_PATH): km.web_interface.get_details(self.path).sendTo(self) + elif self.path.startswith(HTTP_REQUEST_SEARCH): + km.web_interface.get_search(self.path).sendTo(self) else: f = self.send_head() if f: @@ -795,7 +837,7 @@ self.send_error(404, "File not found") return None self.send_response(200) - self.send_header("Content-type", "application/x-hdf") + #self.send_header("Content-type", "application/x-hdf") fs = os.fstat(f.fileno()) self.send_header("Content-Length", str(fs[6])) self.send_header("Last-Modified", self.date_time_string(fs.st_mtime)) Modified: trunk/src/pyphant/pyphant/core/PyTablesPersister.py =================================================================== --- trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2009-09-28 10:33:55 UTC (rev 659) +++ trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2009-11-23 13:49:19 UTC (rev 660) @@ -293,9 +293,10 @@ creator = unicode(h5.getNodeAttr(resNode, "creator"), 'utf-8') machine = unicode(h5.getNodeAttr(resNode, "machine"), 'utf-8') except: - import Helpers - creator = Helpers.getUsername() - machine = Helpers.getMachine() + from pyphant.core.Helpers import emd52dict + emd5dict = emd52dict(resNode._v_title) + creator = emd5dict['creator'] + machine = emd5dict['machine'] data = scipy.array(resNode.data.read()) def loads(inputList): if type(inputList)==type([]): Added: trunk/src/pyphant/pyphant/core/SQLiteWrapper.py =================================================================== --- trunk/src/pyphant/pyphant/core/SQLiteWrapper.py (rev 0) +++ trunk/src/pyphant/pyphant/core/SQLiteWrapper.py 2009-11-23 13:49:19 UTC (rev 660) @@ -0,0 +1,672 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2009, 
Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +""" +This module provides a wrapper class that translates the +KnowledgeManager's summary dictionaries to an SQLite3 database. 
+""" +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source$: + +import sqlite3 +import time +from pyphant.core.Helpers import (utf82uc, uc2utf8, emd52dict) +from pyphant.quantities.PhysicalQuantities import (PhysicalQuantity, + PhysicalUnit) +from types import (FloatType, IntType, LongType, StringTypes) + +def quantity2powers(quantity): + if isinstance(quantity, PhysicalQuantity): + return tuple(quantity.unit.powers) + elif isinstance(quantity, (FloatType, IntType, LongType)): + return (0, ) * 10 + else: + raise ValueError("Expected (PhysicalQuantity, FloatType, IntType, "\ + "LongType) but got %s instead."\ + % (type(quantity), )) + +def str2number(str): + try: + value = int(str) + except ValueError: + try: + value = long(str) + except ValueError: + value = float(str) + return value + +def quantity2dbase(quantity): + if isinstance(quantity, (FloatType, IntType, LongType)): + return quantity.__repr__() + elif isinstance(quantity, PhysicalQuantity): + return "P%s;%s" % (quantity.value.__repr__(), + quantity.getUnitName()) + else: + raise ValueError("Expected (PhysicalQuantity, FloatType, IntType, "\ + "LongType) but got %s instead."\ + % (type(quantity), )) + +def dbase2quantity(dbase): + if isinstance(dbase, StringTypes): + if dbase.startswith("P"): + tmp = dbase[1:].split(';') + return PhysicalQuantity(str2number(tmp[0]), tmp[1]) + else: + return str2number(dbase) + else: + raise ValueError("Broken FC unit in dbase: %s" % (dbase.__repr__(), )) + +def date2dbase(date): + """extends a short datestring to YYYY-MM-DD_hh:mm:ss.ssssss standard + """ + assert len(date) in [4, 7, 10, 13, 16, 19, 21, 22, 23, 24, 25, 26] + date = date.replace(' ', '_') + complete_str = '0000-01-01_00:00:00.000000' + return date + complete_str[len(date):] + +def emd52type(emd5): + if emd5.endswith('d'): + return 'fc' + elif emd5.endswith('e'): + return 'sc' + else: + raise ValueError(emd5) + +def replace_type(str, type): + if type == 'field': + return str % ('fc', 
) + elif type == 'sample': + return str % ('sc', ) + +def get_wildcards(length, char, braces=False, commas=True): + if braces: + wc = '(' + else: + wc = '' + for index in xrange(length): + wc += char + if commas: + wc += ',' + wc += ' ' + if commas: + wc = wc[:-2] + else: + wc = wc[:-1] + if braces: + wc += ')' + return wc + + +class AnyValue(): + """Dummy class for use in search queries + """ + def __init__(self): + pass + + +class SQLiteWrapper(object): + """Wrapper class for DC meta data <-> sqlite3 + """ + common_keys = ['longname', 'shortname', 'machine', + 'creator', 'hash', 'date'] + writable_keys = ['storage'] + fast_keys = ['machine', 'creator', 'date', 'hash', 'type', 'id'] + all_keys = ['id', 'hash', 'longname', 'shortname', 'machine', 'creator', + 'date', 'type', 'attributes', 'storage', 'unit', 'columns', + 'dimensions'] + common_result_keys = common_keys + ['id', 'type', 'storage'] + one_to_one_search_keys = ['longname', 'shortname', 'machine', + 'creator', 'hash', 'storage'] + one_to_one_result_keys = one_to_one_search_keys + ['date', 'id', 'type'] + common_search_keys = one_to_one_search_keys + ['id', 'attributes', + 'date_from', 'date_to'] + fc_search_keys = common_search_keys + ['unit', 'dimensions'] + sc_search_keys = common_search_keys + ['columns'] + sortable_keys = common_keys + ['id', 'storage', 'type'] + any_value = AnyValue() + + def __init__(self, database, timeout=60.0): + """ + Arguments: + - database: database to connect to + """ + self.database = database + self.timeout = timeout + self.connection = None + self.cursor = None + + def __enter__(self): + assert self.connection == None + assert self.cursor == None + self.connection = sqlite3.connect(self.database, self.timeout, + detect_types=sqlite3.PARSE_DECLTYPES) + self.cursor = self.connection.cursor() + return self + + def __exit__(self, type, value, traceback): + if type == None: + try: + self.connection.commit() + except: + print "Could not commit changes to database." 
+ if hasattr(self.cursor, 'close'): + self.cursor.close() + if hasattr(self.connection, 'close'): + self.connection.close() + self.cursor = None + self.connection = None + + def __getitem__(self, emd5): + if self.has_entry(emd5): + if emd5.endswith('field'): + return FCRowWrapper(emd5, self.cursor) + elif emd5.endswith('sample'): + return SCRowWrapper(emd5, self.cursor) + raise KeyError(emd5) + + def setup_dbase(self): + sqlite3.register_converter('QUANTITY', dbase2quantity) + def createTable(table_name, columns, cursor): + query = "CREATE TABLE IF NOT EXISTS %s (" % (table_name, ) + for name, type in columns: + query += name + " " + type + ", " + query = query[:-2] + ")" + cursor.execute(query) + def createTrigger(trigger_name, action, table_name, + statements, cursor): + query = "CREATE TRIGGER IF NOT EXISTS %s AFTER %s ON %s "\ + "FOR EACH ROW BEGIN %s END" + st_query = '' + for st in statements: + st_query += st + ';' + cursor.execute(query % (trigger_name, action, + table_name, st_query)) + #create tables: + columns = [('sc_id', 'TEXT PRIMARY KEY UNIQUE NOT NULL'), + ('longname', 'TEXT'), + ('shortname', 'TEXT'), + ('machine', 'TEXT'), + ('creator', 'TEXT'), + ('date', 'TEXT'), + ('hash', 'TEXT'), + ('storage', 'TEXT')] + createTable("km_sc", columns, self.cursor) + columns[0] = ('fc_id', 'TEXT PRIMARY KEY UNIQUE NOT NULL') + columns.insert(7, ('unit', 'QUANTITY')) + columns.insert(8, ('bu_id', 'INT')) + createTable("km_fc", columns, self.cursor) + columns = [('sc_id', 'TEXT NOT NULL'), + ('fc_id', 'TEXT NOT NULL'), + ('fc_index', 'INT NOT NULL'), + ('', 'UNIQUE(sc_id, fc_id, fc_index)'), + ('', 'PRIMARY KEY(sc_id, fc_id, fc_index)')] + createTable("km_sc_columns", columns, self.cursor) + columns = [('fc_id', 'TEXT NOT NULL'), + ('dim_id', 'TEXT NOT NULL'), + ('dim_index', 'INT NOT NULL'), + ('', 'UNIQUE(fc_id, dim_id, dim_index)'), + ('', 'PRIMARY KEY(fc_id, dim_id, dim_index)')] + createTable("km_fc_dimensions", columns, self.cursor) + columns = [('dc_id', 
'TEXT NOT NULL'), + ('key', 'TEXT NOT NULL'), + ('value', 'TEXT'), + ('', 'UNIQUE(dc_id, key)'), + ('', 'PRIMARY KEY(dc_id, key)')] + createTable('km_attributes', columns, self.cursor) + columns = [('dc_id', 'TEXT PRIMARY KEY UNIQUE NOT NULL')] + createTable("km_temporary", columns, self.cursor) + columns = [('bu_id', 'INTEGER PRIMARY KEY AUTOINCREMENT '\ + 'NOT NULL UNIQUE'), + ('m', 'INT'), + ('g', 'INT'), + ('s', 'INT'), + ('A', 'INT'), + ('K', 'INT'), + ('mol', 'INT'), + ('cd', 'INT'), + ('rad', 'INT'), + ('sr', 'INT'), + ('EUR', 'INT'), + ('', 'UNIQUE(m, g, s, A, K, mol, cd, rad, sr, EUR)')] + createTable('km_base_units', columns, self.cursor) + #create triggers: + createTrigger('trigger_del_fc', 'DELETE', 'km_fc', + ['DELETE FROM km_attributes WHERE dc_id=OLD.fc_id', + 'DELETE FROM km_fc_dimensions WHERE fc_id=OLD.fc_id'], + self.cursor) + createTrigger('trigger_del_sc', 'DELETE', 'km_sc', + ['DELETE FROM km_attributes WHERE dc_id=OLD.sc_id', + 'DELETE FROM km_sc_columns WHERE sc_id=OLD.sc_id'], + self.cursor) + createTrigger('trigger_del_tmp', 'DELETE', 'km_temporary', + ['DELETE FROM km_fc WHERE fc_id=OLD.dc_id', + 'DELETE FROM km_sc WHERE sc_id=OLD.dc_id'], + self.cursor) + #clean tmp: + self.cursor.execute("DELETE FROM km_temporary") + #add IndexMarker dummy if not present yet: + from pyphant.core.DataContainer import IndexMarker + im = IndexMarker() + im_summary = {'id':'IndexMarker', 'longname':im.longname, + 'shortname':im.shortname, 'hash':im.hash, + 'creator':None, 'machine':None, + 'date':'xxxx-xx-xx_xx:xx:xx.xxxxxx', + 'unit':1, 'dimensions':['IndexMarker'], 'attributes':{}} + self.set_entry(im_summary, None) + + def has_entry(self, id): + exe = self.cursor.execute + if id == 'IndexMarker': + type = 'fc' + else: + type = emd52type(id) + exe("SELECT %s_id FROM km_%s WHERE %s_id=?" 
% (type, type, type), + (id, )) + return self.cursor.fetchone() != None + + def _set_fc_keys(self, insert_dict, summary): + exe = self.cursor.execute + insert_dict['fc_id'] = summary['id'] + insert_dict['unit'] = quantity2dbase(summary['unit']) + try: + exe("INSERT OR ABORT INTO km_base_units "\ + "(m, g, s, A, K, mol, cd, rad, sr, EUR) "\ + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + quantity2powers(summary['unit'])) + l_row_id = self.cursor.lastrowid + except sqlite3.IntegrityError: + exe("SELECT bu_id FROM km_base_units WHERE m=? AND g=? "\ + "AND s=? AND A=? AND K=? AND mol=? AND cd=? AND rad=? "\ + "AND sr=? AND EUR=?", quantity2powers(summary['unit'])) + tmp = self.cursor.fetchone() + assert tmp != None + l_row_id = tmp[0] + insert_dict['bu_id'] = l_row_id + dimension_query = "INSERT INTO km_fc_dimensions VALUES (?, ?, ?)" + for dim_id, dim_index in zip(summary['dimensions'], + range(len(summary['dimensions']))): + exe(dimension_query, (summary['id'], dim_id, dim_index)) + + def set_entry(self, summary, storage, temporary=False): + """Sets the meta data in the database according to the + summary dictionary. If the according entry already exists, + the database is not changed since the same emd5s should always + reference the same (meta)data. + Arguments: + - summary: dictionary with meta data + - storage: string type (e.g. path in local file system) + - temporary: Flag that marks data to be deleted upon next + call to setup_dbase(). 
+ """ + if self.has_entry(summary['id']): + return + exe = self.cursor.execute + if temporary: + exe("INSERT INTO km_temporary VALUES (?)", + (summary['id'],)) + insert_dict = dict([(key, value) for key, value in \ + summary.iteritems() if key in \ + SQLiteWrapper.common_keys]) + insert_dict['storage'] = storage + insert_dict['date'] = date2dbase(insert_dict['date']) + if summary['id'] == 'IndexMarker': + type = 'fc' + else: + type = emd52type(summary['id']) + attr_query = "INSERT INTO km_attributes VALUES (?, ?, ?)" + for key, value in summary['attributes'].iteritems(): + assert isinstance(key, StringTypes) + exe(attr_query, (summary['id'], key, value.__repr__())) + if type == 'fc': + self._set_fc_keys(insert_dict, summary) + else: + insert_dict['sc_id'] = summary['id'] + column_query = "INSERT INTO km_sc_columns VALUES (?, ?, ?)" + for fc_id, fc_index in zip(summary['columns'], + range(len(summary['columns']))): + exe(column_query, (summary['id'], fc_id, fc_index)) + insert_query = "INSERT INTO km_%s %s VALUES %s" + value_list = [] + key_query = "(" + value_query = "(" + for key, value in insert_dict.iteritems(): + value_query += "?, " + key_query += key + ", " + value_list.append(value) + key_query = key_query[:-2] + ")" + value_query = value_query[:-2] + ")" + insert_query = insert_query % (type, key_query, value_query) + exe(insert_query, tuple(value_list)) + + def get_emd5_list(self): + self.cursor.execute("SELECT fc_id FROM km_fc") + emd5_list = self.cursor.fetchall() + self.cursor.execute("SELECT sc_id FROM km_sc") + emd5_list.extend(self.cursor.fetchall()) + return [row[0] for row in emd5_list] + + def verify_keys(self, keys, allowed): + for key in keys: + if not key in allowed: + raise KeyError(key) + + def translate_result_key(self, key, type): + if key == 'id': + return replace_type("%s_id", type) + elif key == 'type': + return "'%s' AS type" % type + else: + return key + + def translate_unit_search(self, value): + if isinstance(value, 
PhysicalQuantity): + value = value.unit.powers + elif isinstance(value, (IntType, LongType, FloatType)): + value = [0] * 10 + elif isinstance(value, PhysicalUnit): + value = value.powers + else: + raise ValueError(value) + expr = '(bu_id IN (SELECT bu_id FROM km_base_units WHERE '\ + 'm=? AND g=? AND s=? AND A=? AND K=? AND mol=? '\ + 'AND cd=? AND rad=? AND sr=? AND EUR=?))' + return (expr, value, True) + + def translate_attr_search(self, value, type): + expr = '(' + new_value = [] + for attr_key, attr_value in value.iteritems(): + if isinstance(attr_value, AnyValue): + value_expr = '' + new_value.append(attr_key) + else: + value_expr = ' AND value=?' + new_value.extend([attr_key, attr_value.__repr__()]) + expr += '(%s IN (SELECT dc_id FROM km_attributes '\ + 'WHERE key=?%s))' \ + % (replace_type('%s_id', type), value_expr) + expr += ' AND ' + expr = expr[:-5] + ')' + return (expr, new_value, True) + + def translate_list_search(self, key, value, type): + id_str = replace_type('%s_id', type) + if key == 'columns': + table = 'km_sc_columns' + index_str = 'fc_index' + lid_str = 'fc_id' + else: + table = 'km_fc_dimensions' + index_str = 'dim_index' + lid_str = 'dim_id' + qry = '(%s IN (SELECT %s FROM %s WHERE %s=? AND (%s IN (%s))))'\ + % (id_str, id_str, table, index_str, lid_str, '%s') + new_value = [] + expr = '(' + for fc_search_dict, fc_index in zip(value, range(len(value))): + fc_query, fc_values = self.get_andsearch_query( + 'field', ['id'], fc_search_dict, False) + expr += qry % (fc_query, ) + new_value.append(fc_index) + if fc_values != None: + new_value.extend(fc_values) + expr += ' AND ' + return (expr[:-5] + ')', new_value, True) + + def translate_search_dict(self, type, search_dict): + where = '' + values = [] + for key, value in search_dict.iteritems(): + extend = False + if key in self.one_to_one_search_keys: + expr = '%s=?' % key + elif key == 'id': + expr = '%s=?' % replace_type('%s_id', type) + elif key == 'date_from': + expr = 'date>=?' 
+ value = date2dbase(value) + elif key == 'date_to': + expr = 'date<?' + value = date2dbase(value) + elif key == 'unit': + expr, value, extend = self.translate_unit_search(value) + elif key == 'attributes': + expr, value, extend = self.translate_attr_search(value, type) + elif key == 'columns' or key == 'dimensions': + expr, value, extend = self.translate_list_search( + key, value, type) + else: + raise NotImplementedError(key) + where += expr + " AND " + if extend: + values.extend(value) + else: + values.append(value) + return where[:-5], values + + def get_andsearch_query(self, type, result_keys, search_dict, distinct): + trans_res_keys = tuple([self.translate_result_key(key, type) \ + for key in result_keys]) + if type == 'field': + table = 'km_fc' + elif type == 'sample': + table = 'km_sc' + if search_dict == {}: + qry = "SELECT%s %s FROM %s " + values = None + else: + qry = "SELECT%s %s FROM %s WHERE " + if distinct: + dist_str = ' DISTINCT' + else: + dist_str = ' ALL' + qry = (qry % (dist_str, get_wildcards(len(trans_res_keys), + '%s'), table)) % trans_res_keys + if search_dict != {}: + where, values = self.translate_search_dict(type, search_dict) + qry += where + return qry, values + + def get_andsearch_result(self, result_keys, search_dict={}, + order_by=None, order_asc=True, + limit=-1, offset=0, distinct=False): + """returns a list of tuples filled with values of the result keys + matching the constraints of search_dict. + Arguments: + - result_keys: List (of length >= 1) of keys to include in the + result tuples. + - search_dict: Dict mapping keys to constraint values. 
+ Use empty dict for no constraints at all + possible keys: values (used relational operator[, type constraint]): + 'longname': str types (==) + 'shortname': str types (==) + 'machine': str types (==) + 'creator: str types (==) + 'date_from:' str types: + YYYY[-MM[-DD[_hh:[mm:[ss[.s[s[s[s[s[s]]]]]]]]]]] (>=) + 'date_to:' str types: + YYYY[-MM[-DD[_hh:[mm:[ss[.s[s[s[s[s[s]]]]]]]]]]] (<) + 'hash': str types (==) + 'id': str types: emd5 (==) + 'type': 'field' or 'sample' (==) + 'attributes': dict mapping attr. key to attr. value (==) + use (SQLiteWrapper instance).any_value + or (KM instance).any_value to skip value check + 'storage': str types (==) + 'unit': PhysicalUnit or number or PhysicalQuantity (==, FC only) + 'dimensions': list of FC search dicts + (see above definitions, FC only) + 'columns': list of FC search dicts (see above definitions, SC only) + - order_by: element of result_keys to order the results by + or None for no special ordering + - order_asc: whether to order ascending + - limit: maximum number of results to return, + set to -1 for no limit, default: -1 + - offset: number of search results to skip, default: 0 + - distinct: flag that indicates whether the result list + should only contain distinct tuples. + Usage Examples: + Get list of all longnames: + get_andsearch_result(['longname'], distinct=True) + --> [('name1', ), ('name2', ), ...] + Get id and shortname of all FCs that are parametrized by + a time dimension along the primary axis: + tunit = PhysicalQuantity(1, 's') + get_andsearch_result(['id', 'shortname'], + {'type':'field', + 'dimensions':[{'unit':tunit}]}) + --> [('emd5_1', 'name_1'), ('emd5_2', 'name_2'), ...] 
+ """ + if order_by == None: + order = '' + else: + assert order_by in result_keys + assert order_by in self.sortable_keys + order = ' ORDER BY %s' % order_by + if order_asc: + order += ' ASC' + else: + order += ' DESC' + assert isinstance(limit, int) + assert isinstance(offset, int) + if not search_dict.has_key('type'): + self.verify_keys(result_keys, self.common_result_keys) + self.verify_keys(search_dict.keys(), self.common_search_keys) + fc_query, fc_values \ + = self.get_andsearch_query('field', result_keys, + search_dict, distinct) + sc_query, sc_values \ + = self.get_andsearch_query('sample', result_keys, + search_dict, distinct) + if distinct: + dist_str = '' + else: + dist_str = ' ALL' + query = "%s UNION%s %s%s LIMIT %d OFFSET %d" + query = query % (fc_query, dist_str, sc_query, order, limit, offset) + if search_dict != {}: + values = fc_values + sc_values + else: + values = None + mod_search_dict = search_dict + else: + if search_dict['type'] == 'field': + allowed_search_keys = self.fc_search_keys + all... [truncated message content] |
From: <zk...@us...> - 2010-01-11 21:39:53
|
Revision: 662 http://pyphant.svn.sourceforge.net/pyphant/?rev=662&view=rev Author: zklaus Date: 2010-01-11 21:39:34 +0000 (Mon, 11 Jan 2010) Log Message: ----------- Merge branch 'master' into svn-trunk * master: Clutch: Ugly hack to allow for creator attributes in fmf files. Fix: Fixes a bug in an error message. Modified Paths: -------------- trunk/src/pyphant/pyphant/core/PyTablesPersister.py trunk/src/workers/fmfile/fmfile/FMFLoader.py Modified: trunk/src/pyphant/pyphant/core/PyTablesPersister.py =================================================================== --- trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2009-12-15 18:20:40 UTC (rev 661) +++ trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2010-01-11 21:39:34 UTC (rev 662) @@ -172,7 +172,9 @@ h5.setNodeAttr(resultGroup, "machine", result.machine.encode("utf-8")) for key,value in result.attributes.iteritems(): if key in _reservedAttributes: - raise ValueError, "Attribute should not be named %s!" % _reservedAttributes + raise ValueError("Attributes should not be named %s, " + "but one was in fact called %s!" + % (str(_reservedAttributes), key)) h5.setNodeAttr(resultGroup,key,value) #Store fields of sample Container and gather list of field IDs columns = [] Modified: trunk/src/workers/fmfile/fmfile/FMFLoader.py =================================================================== --- trunk/src/workers/fmfile/fmfile/FMFLoader.py 2009-12-15 18:20:40 UTC (rev 661) +++ trunk/src/workers/fmfile/fmfile/FMFLoader.py 2010-01-11 21:39:34 UTC (rev 662) @@ -242,7 +242,14 @@ newField.dimensions[dim]=independentFields[indepField] assert newField.isValid() containers.append(newField) - result = DataContainer.SampleContainer(containers,attributes=commonAttr) + #The next lines are a hack and should be dealt with properly... 
+ if u'creator' in commonAttr.keys(): + creator = commonAttr[u'creator'] + del commonAttr[u'creator'] + result = DataContainer.SampleContainer(containers,attributes=commonAttr) + result.creator = creator + else: + result = DataContainer.SampleContainer(containers,attributes=commonAttr) return result def reshapeField(field): This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2010-02-01 17:45:07
|
Revision: 663 http://pyphant.svn.sourceforge.net/pyphant/?rev=663&view=rev Author: zklaus Date: 2010-02-01 17:45:00 +0000 (Mon, 01 Feb 2010) Log Message: ----------- Merge branch 'master' into svn-trunk * master: (35 commits) Enh: Exceptions now displayed in wxPyphant Enh: Saving recipes w/o results Enh: Added save as dialog to wxPyphant Enh: Added batch method to Helpers module Enh: Added sobel filter to NDImage Enh: Wrote XMLHandler Enh: Added preliminary attribute search Cosm: Nicer web interface Cosm: Nicer web interface Enh: Improved web interface Enh: Improved web interface Enh: Reimplemented simple web search (dirty) Fix: Adapted code to fit Python 2.5 and 2.6 Enh: Routed paste logger to pyphant logger Cosm: Nicer web interface Fix: Fixed syntax to fit python 2.5 Enh: Added log to web interface Fix: Bugfix for KN Fix: Bugfix for KN Enh: improved KN usage ... Modified Paths: -------------- trunk/src/pyphant/pyphant/core/H5FileHandler.py trunk/src/pyphant/pyphant/core/Helpers.py trunk/src/pyphant/pyphant/core/KnowledgeManager.py trunk/src/pyphant/pyphant/core/PyTablesPersister.py trunk/src/pyphant/pyphant/core/SQLiteWrapper.py trunk/src/pyphant/pyphant/core/WebInterface.py trunk/src/pyphant/pyphant/tests/TestH5FileHandler.py trunk/src/pyphant/pyphant/tests/TestSQLiteWrapper.py trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py trunk/src/workers/ImageProcessing/ImageProcessing/NDImageWorker.py Added Paths: ----------- trunk/src/pyphant/pyphant/core/KnowledgeNode.py trunk/src/pyphant/pyphant/core/RoutingHTTPServer.py trunk/src/pyphant/pyphant/core/XMLHandler.py trunk/src/pyphant/pyphant/core/bottle.py trunk/src/pyphant/pyphant/tests/TestKnowledgeNode.py trunk/src/pyphant/pyphant/web/ trunk/src/pyphant/pyphant/web/images/ trunk/src/pyphant/pyphant/web/images/disabled.gif trunk/src/pyphant/pyphant/web/images/favicon.ico trunk/src/pyphant/pyphant/web/images/offline.gif trunk/src/pyphant/pyphant/web/images/online.gif 
trunk/src/pyphant/pyphant/web/images/pyphant.png trunk/src/pyphant/pyphant/web/script/ trunk/src/pyphant/pyphant/web/script/LaTeXMathML.js trunk/src/pyphant/pyphant/web/templates/ trunk/src/pyphant/pyphant/web/templates/back.tpl trunk/src/pyphant/pyphant/web/templates/disabled.tpl trunk/src/pyphant/pyphant/web/templates/fieldcontainer.tpl trunk/src/pyphant/pyphant/web/templates/frontpage.tpl trunk/src/pyphant/pyphant/web/templates/htmlhead.tpl trunk/src/pyphant/pyphant/web/templates/log.tpl trunk/src/pyphant/pyphant/web/templates/mathhead.tpl trunk/src/pyphant/pyphant/web/templates/message.tpl trunk/src/pyphant/pyphant/web/templates/ordermathhead.tpl trunk/src/pyphant/pyphant/web/templates/remotes.tpl trunk/src/pyphant/pyphant/web/templates/samplecontainer.tpl trunk/src/pyphant/pyphant/web/templates/search.tpl Modified: trunk/src/pyphant/pyphant/core/H5FileHandler.py =================================================================== --- trunk/src/pyphant/pyphant/core/H5FileHandler.py 2010-01-11 21:39:34 UTC (rev 662) +++ trunk/src/pyphant/pyphant/core/H5FileHandler.py 2010-02-01 17:45:00 UTC (rev 663) @@ -44,9 +44,20 @@ import logging import os from pyphant.core import PyTablesPersister +from pyphant.core.DataContainer import IndexMarker +from pyphant.core.Helpers import (utf82uc, emd52dict) _logger = logging.getLogger("pyphant") +im = IndexMarker() +im_id = u"emd5://pyphant/pyphant/0001-01-01_00:00:00.000000/%s.field" \ + % utf82uc(im.hash) +im_summary = {'id':im_id, 'longname':utf82uc(im.longname), + 'shortname':utf82uc(im.shortname), 'hash':utf82uc(im.hash), + 'creator':u'pyphant', 'machine':u'pyphant', + 'date':u'0001-01-01_00:00:00.000000', + 'unit':1, 'dimensions':[im_id], 'attributes':{}} + class H5FileHandler(object): """ This class is used to handle IO operations on HDF5 files. 
@@ -157,12 +168,13 @@ currDcId = group._v_attrs.TITLE if len(currDcId) > 0: tmp = self.loadSummary(currDcId) - if tmp != u'IndexMarker': + if tmp == 'IndexMarker': + summary[im_id] = im_summary + else: summary[currDcId] = tmp elif self.isIndexMarker(dcId): return u'IndexMarker' else: - from pyphant.core.Helpers import (utf82uc, emd52dict) summary = {} summary['id'] = dcId resNode, uriType = self.getNodeAndTypeFromId(dcId) @@ -187,7 +199,7 @@ dimTable = resNode.dimensions def filterIndexMarker(emd5): if self.isIndexMarker(emd5): - return u'IndexMarker' + return im_id else: return emd5 dimensions = [filterIndexMarker(row['id']) \ Modified: trunk/src/pyphant/pyphant/core/Helpers.py =================================================================== --- trunk/src/pyphant/pyphant/core/Helpers.py 2010-01-11 21:39:34 UTC (rev 662) +++ trunk/src/pyphant/pyphant/core/Helpers.py 2010-02-01 17:45:00 UTC (rev 663) @@ -131,3 +131,47 @@ retdict['hash'] = emd5_split[5].split('.')[0] retdict['type'] = emd5_split[5].split('.')[1] return retdict + +def batch(recipe, input, plug, longname, dobatch=True, temporary=False): + """ + Runs the same recipe multiple times for different input data. + The return value is either a SampleContainer similar to input + with 'emd5' column replaced by results or the resulting + DataContainer from plug, if dobatch is set to False. 
+ recipe -- CompositeWorker instance + input -- SampleContainer with 'emd5' column or any DataContainer if + dobatch is set to False + plug -- plug contained in recipe to get output from + (there has to be exactly one open socket in recipe + ascending from plug) + longname -- longname of resulting SampleContainer, works only for + dobatch == True + dobatch -- if set to False, input is treated as a single data source + temporary -- whether to register results temporarily, only applies when + dobatch is set to True + """ + socket = recipe.getOpenSocketsForPlug(plug)[0] + from tools import Emd5Src + DummyWorker = Emd5Src.Emd5Src() + socket.insert(DummyWorker.getPlugs()[0]) + DummyWorker.paramSelectby.value = u"enter emd5" + from pyphant.core.KnowledgeManager import KnowledgeManager + km = KnowledgeManager.getInstance() + if dobatch: + import copy + output = copy.deepcopy(input) + index = 0 + for emd5 in input['emd5'].data: + DummyWorker.paramEnteremd5.value = emd5 + resultDC = plug.getResult() + km.registerDataContainer(resultDC, temporary=temporary) + output['emd5'].data[index] = resultDC.id + index += 1 + output.longname = longname + output.seal() + else: + km.registerDataContainer(input) + DummyWorker.paramEnteremd5.value = input.id + output = plug.getResult() + socket.pullPlug() + return output Modified: trunk/src/pyphant/pyphant/core/KnowledgeManager.py =================================================================== --- trunk/src/pyphant/pyphant/core/KnowledgeManager.py 2010-01-11 21:39:34 UTC (rev 662) +++ trunk/src/pyphant/pyphant/core/KnowledgeManager.py 2010-02-01 17:45:00 UTC (rev 663) @@ -28,12 +28,12 @@ # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ from __future__ import with_statement """ -This module provides the KnowledgeManager class as well as some helper methods -for handling the .pyphant directory and some helper classes for the -KnowledgeManager class. +This module provides the KnowledgeManager class as well as some helper +classes. """ __id__ = "$Id$" @@ -42,32 +42,18 @@ # $Source: $ from pyphant.core.singletonmixin import Singleton -import urllib -import cgi import tempfile -import sys -import os, os.path +import os import logging -import threading -from uuid import uuid1 import re -import time -from urlparse import urlparse -from SimpleHTTPServer import SimpleHTTPRequestHandler -from pyphant.core.H5FileHandler import H5FileHandler -from pyphant.core.WebInterface import (HTTPAnswer, - WebInterface, - KMHTMLParser, - ThreadedHTTPServer) +from pyphant.core.H5FileHandler import (H5FileHandler, im_id) from fmfile import FMFLoader from pyphant.core.SQLiteWrapper import (SQLiteWrapper, AnyValue) +from pyphant.core.Helpers import getPyphantPath +from uuid import uuid1 +from urlparse import urlparse +import urllib -WAITING_SECONDS_HTTP_SERVER_STOP = 5 -HTTP_REQUEST_DC_URL_PATH = "/request_dc_url" -HTTP_REQUEST_KM_ID_PATH = "/request_km_id" -HTTP_REQUEST_DC_DETAILS_PATH = "/request_dc_details?dcid=" -HTTP_REQUEST_REGISTER_KM = "/request_register_km" -HTTP_REQUEST_SEARCH = "/request_search" # Limit for sum(DC.rawDataBytes) for DC in cache: CACHE_MAX_SIZE = 256 * 1024 * 1024 # Limit for number of stored DCs in cache: @@ -76,29 +62,6 @@ REHDF5 = re.compile(r'..*\.h5$|..*\.hdf$|..*\.hdf5$') REFMF = re.compile(r'..*\.fmf$') -def getPyphantPath(subdir = '/'): - """ - returns full pyphant path with optional subdirectory - subdir -- subdirectory that is created if it does not exist already, - recursive creation of directories is supported also. 
- """ - homedir = os.path.expanduser('~') - if not subdir.startswith('/'): - subdir = '/' + subdir - if not subdir.endswith('/'): - subdir = subdir + '/' - if homedir == '~': - homedir = os.getcwdu() - plist = ('/.pyphant' + subdir).split('/') - makedir = homedir - path = homedir + '/.pyphant' + subdir - for p in plist: - if p != '': - makedir += "/%s" % (p, ) - if not os.path.isdir(makedir): - os.mkdir(makedir) - return path - def getFilenameFromDcId(dcId, temporary=False): """ Returns a unique filename for the given emd5. @@ -118,33 +81,14 @@ return getPyphantPath(KM_PATH + subdir + directory) + filename -class KnowledgeManagerException(Exception): - """ - Exception class that is able to store parent exceptions. - """ - def __init__(self, message, parent_excep = None, *args, **kwds): - """ - message -- human readable reason for the exception - parent_excep -- exception that is the reason for throwing this one. - """ - super(KnowledgeManagerException, self).__init__(message, *args, **kwds) - self._message = message - self._parent_excep = parent_excep +class DCNotFoundError(Exception): + pass - def __str__(self): - """ - Returns error message with reason from parent exception. - """ - return self._message + " (reason: %s)" % (str(self._parent_excep), ) - def getParentException(self): - """ - Returns the parent exception - """ - return self._parent_excep - - class CachedDC(object): + """ + Class representing a cached DataContainer + """ def __init__(self, dc_ref): self.id = dc_ref.id self.ref = dc_ref @@ -155,6 +99,9 @@ class TestCachedDC(object): + """ + Class representing a cache lookup + """ def __init__(self, dc_id): self.id = dc_id @@ -162,80 +109,73 @@ return self.id == other.id +KM_DBASE = u'default' # modify for debug purposes + + class KnowledgeManager(Singleton): """ Knowledge Manager for Pyphant ============================= - The ID of a DataContainer object is given by a emd5 string. + The ID of a DataContainer object is given by an emd5 string. 
Responsibilities: ----------------- - - register HDF5 files by their URLs - - register remote knowledge managers by urls - - share data containers via HTTP, they are requested by id - - get references for these data containers (local or remote) - If an operation fails, a KnowledgeManagerException - will be raised. These exceptions have a method - .getParentException() - in order to get additional information about the reason. - Usage: - ------ + - Manage local storage of DataContainers + - Resolve IDs to DC instances + - Communicate with a KnowledgeNode in order to share Knowledge + among other KM instances via HTTP. + - Manage and search meta data for DataContainers + Usage examples: + --------------- Get a reference to the KnowledgeManager instance, which is a singleton: - import pyphant.core.KnowledgeManager as KM - km = KM.KnowledgeManager.getInstance() - Optionally: Start HTTP server for sharing data with others by - km.startServer(<host>,<port>) + from pyphant.core.KnowledgeManager import KnowledgeManager + km = KnowledgeManager.getInstance() Register a local HDF5 file: + km.registerURL("/some_directory/data.h5") + Register and persist a local HDF5 file: km.registerURL("file:///tmp/data.h5") - Register a remote HDF5 file: - km.registerURL("http://example.com/repository/data.h5") - Register another KnowledgeManager in order to benefit - from their knowledge (see arguments of .startServer): - km.registerKnowledgeManager("http://example.com", 8000, True) - Request data container by its id: - dc = km.getDataContainer(id) - Use the data container! + Register and persist a remote FMF file: + km.registerURL("http://example.com/repository/data.fmf") + Request DataContainer by its id: + dc = km.getDataContainer(id) # `dc` is an actual instance now + For searching meta data see docstring of KM.search() below. + How to share Knowledge: + Hook up the KM to a KnowledgeNode, see documentation in the + KnowledgeNode module. 
+ Known issues: + ------------ + KM is NOT thread-safe yet. """ def __init__(self): """ - Sets the unique id for the KM instance, sets up the DataBase if - it has not been initialized yet and clears the tmp dir. + Sets up the DataBase if it has not been initialized yet, + sets up the cache and clears the tmp dir. + Sets a uuid to identify the instance. """ super(KnowledgeManager, self).__init__() - self._logger = logging.getLogger("pyphant") + self.logger = logging.getLogger("pyphant") self._cache = [] self._cache_size = 0 - self.H5FileHandlers = {} - self._remoteKMs = {} # key:id, value:url - self._server = None - self._server_id = uuid1() - self.web_interface = WebInterface(self, True) - self.dbase = getPyphantPath('/sqlite3/') + "km_meta.sqlite3" + if KM_DBASE == u'default': + self.dbase = getPyphantPath('/sqlite3/') + "km_meta.sqlite3" + else: + self.dbase = KM_DBASE self.any_value = AnyValue() with SQLiteWrapper(self.dbase) as wrapper: wrapper.setup_dbase() + self.node = None # for hooking up a KnowledgeNode + self.uuid = uuid1().urn tmpdir = getPyphantPath(KM_PATH + 'tmp/') if os.path.isdir(tmpdir): from shutil import rmtree try: rmtree(tmpdir) except OSError: - print "Could not delete '%s'." % (tmpdir, ) + self.logger.warn("Could not delete '%s'." % tmpdir) - def tearDown(self): - """ - Stops the HTTP server - """ - if self.isServerRunning(): - self.stopServer() - def hasDataContainer(self, dcid): """ Returns whether the given DC is stored locally. - Never use this method in a 'with SQLiteWrapper(...) as wrapper' - statement! Use wrapper.has_entry(dcid) instead if you already - have a wrapper at your hands or you may end up in a sqlite3 locking - loop. 
""" with SQLiteWrapper(self.dbase) as wrapper: has_entry = wrapper.has_entry(dcid) @@ -263,135 +203,11 @@ summaryDict = h5fh.loadSummary() with SQLiteWrapper(self.dbase) as wrapper: for dcId, summary in summaryDict.items(): - if not wrapper.has_entry(dcId): + if dcId == im_id: + wrapper.set_entry(summary, None, temporary) + else: wrapper.set_entry(summary, filename, temporary) - def _getServerURL(self): - """ - Returns the URL of the HTTP server. - """ - if self._server is None: - return None - return "http://%s:%d" % (self._http_host, self._http_port) - - def getServerId(self): - """ - Returns uniqe id of the KnowledgeManager as uuid URN. - """ - return self._server_id.urn - - def startServer(self, host = '127.0.0.1', port = 8000, - provide_web_frontend = False): - """ - Starts the HTTP server. When the server was running already, - it is restartet with the new parameters. - A temporary directory is generated in order to - save temporary HDF5 files. - The data may be announced to other KnowledgeManagers. - host -- full qualified domain name or IP address under which - server can be contacted via HTTP, default: '127.0.0.1' - port -- port of HTTP server (integer), default: 8000 - provide_web_frontend -- whether to provide web frontend, default: False - """ - logger = self._logger - if self.isServerRunning(): - logger.warn("Server is running at host %s, port %d already. " - "Stopping server...", self._http_host, self._http_port) - self.stopServer() - self._http_host = host - self._http_port = port - self._http_dir = tempfile.mkdtemp(prefix = 'pyphant-knowledgemanager') - self._server = ThreadedHTTPServer((host, port), KMHTTPRequestHandler) - class _HTTPServerThread(threading.Thread): - def run(other): - self._server.start() - self._http_server_thread = _HTTPServerThread() - self._http_server_thread.start() - self._logger.debug("Started HTTP server. 
Host: %s, port: %d, " - "temp dir: %s", host, port, self._http_dir) - self.web_interface.disabled = not provide_web_frontend - - def stopServer(self): - """ - Stops the HTTP server. The temporary directory is removed. - """ - logger = self._logger - if self.isServerRunning(): - self.web_interface.disabled = True - self._server.stop_server = True - # do fake request - try: - urllib.urlopen(self._getServerURL()) - except: - logger.warn("Fake HTTP request failed when stopping HTTP " - "server.") - logger.info("Waiting for HTTP server thread to die...") - self._http_server_thread.join(WAITING_SECONDS_HTTP_SERVER_STOP) - if self._http_server_thread.isAlive(): - logger.warn("HTTP server thread could not be stopped.") - else: - logger.info("HTTP server has been stopped.") - self._server = None - self._http_host = None - self._http_port = None - try: - logger.debug("Deleting temporary directory '%s'..", - self._http_dir) - os.removedirs(self._http_dir) - except Exception: - logger.warn("Failed to delete temporary directory '%s'.", - self._http_dir) - self._http_dir = None - else: - self._logger.warn("HTTP server should be stopped but isn't " - "running.") - - def isServerRunning(self): - """ - Returns whether HTTP server is running. - """ - return self._server is not None - - def registerKnowledgeManager(self, host, port = 8000, - share_knowledge = False): - """ - Registers a knowledge manager. The remote KnowledgeManager is - contacted immediately in order to save its unique ID. 
- host -- full qualified domain name or IP address at which - server can be contacted via HTTP - port -- port of HTTP server (integer), default: 8000 - share_knowledge -- local knowledge is made available to the remote KM - when set to True and the HTTP server is running at - the local KM, default: False - """ - logger = self._logger - try: - km_url = "http://%s:%d" % (host, port) - # get unique id from KM via HTTP - logger.debug("Requesting ID from Knowledgemanager with URL '%s'...", - km_url) - # request url for given id over http - local_km_host = '' - local_km_port = '' - if self.isServerRunning() and share_knowledge: - local_km_host = self._http_host - local_km_port = str(self._http_port) - post_data = urllib.urlencode({'kmhost':local_km_host, - 'kmport':local_km_port}) - answer = urllib.urlopen(km_url + HTTP_REQUEST_KM_ID_PATH, post_data) - logger.debug("Info from HTTP answer: %s", answer.info()) - htmltext = answer.read() - parser = KMHTMLParser() - parser.feed(htmltext) - km_id = parser.headitems['pyphant']['kmid'].strip() - answer.close() - logger.debug("KM ID read from HTTP answer: %s", km_id) - except Exception, excep: - raise KnowledgeManagerException( - "Couldn't get ID for knowledge manager under URL %s." - % (km_url, ), excep) - self._remoteKMs[km_id] = km_url - def registerURL(self, url, temporary=False): """ Registers an HDF5 or FMF file downloadable from given URL and stores it @@ -420,7 +236,8 @@ fnwoext = '' for part in split: fnwoext += (part + '.') - while i < sys.maxint: + from sys import maxint + while i < maxint: fill = str(i).zfill(10) tryfn = "%s/%s%s.%s" % (directory, fnwoext, fill, ext) if os.path.exists(tryfn): @@ -428,17 +245,17 @@ else: filename = tryfn break - self._logger.info("Retrieving url '%s'..." % (url, )) - self._logger.info("Using local file '%s'." % (filename, )) + self.logger.info("Retrieving url '%s'..." % url) + self.logger.info("Using local file '%s'." 
% filename) savedto, headers = urllib.urlretrieve(url, filename) - self._logger.info("Header information: %s", (str(headers), )) if REFMF.match(filename.lower()) != None: self.registerFMF(filename, temporary) elif REHDF5.match(filename.lower()) != None: self.registerH5(filename, temporary) else: - raise KnowledgeManagerException('Filetype unknown: %s' - % (filename, )) + msg = "Could not guess type of '%s'" % url + self.logger.error(msg) + raise ValueError(msg) def registerDataContainer(self, dc, temporary=False): """ @@ -455,8 +272,9 @@ your hard drive. """ if dc.id == None: - raise KnowledgeManagerException("Invalid id for DataContainer '"\ - + dc.longname + "'") + msg = "Missing id for DataContainer. DC has not been sealed." + self.logger.error(msg) + raise ValueError(msg) if not self.hasDataContainer(dc.id): filename = getFilenameFromDcId(dc.id, temporary) handler = self.getH5FileHandler(filename, 'w') @@ -476,101 +294,6 @@ self.registerDataContainer(sc, temporary) return sc.id - def _getDCURLFromRemoteKMs(self, query_dict): - """ - Returns URL for a DataContainer by requesting remote - KnowledgeManagers. - query_dict -- see _getDataContainerURL - """ - logger = self._logger - # add this KM to query - query_dict['lastkmidindex'] += 1 - query_dict['kmid%d' % query_dict['lastkmidindex']] = self.getServerId() - # ask every remote KnowledgeManager for id - logger.debug("Requesting knowledge managers for DC id '%s'..." 
- % (query_dict['dcid'], )) - dc_url = None - for km_id, km_url in self._remoteKMs.iteritems(): - if not (km_id in query_dict.values()): #<-- TODO: exclude wrong keys - logger.debug("Requesting Knowledgemanager with ID '%s' and \ -URL '%s'...", km_id, km_url) - # request url for given id over http - try: - data = urllib.urlencode(query_dict) - logger.debug("URL encoded query: %s", data) - answer = urllib.urlopen(km_url + HTTP_REQUEST_DC_URL_PATH, - data) - code = int(answer.headers.dict['code']) - if code < 400: - parser = KMHTMLParser() - htmltext = answer.read() - parser.feed(htmltext) - dc_url = parser.headitems['pyphant']['hdf5url'].strip() - logger.debug("URL for id read from HTTP answer: %s", - dc_url) - break - elif code == 404: - # update query_dict: - parser = KMHTMLParser() - htmltext = answer.read() - parser.feed(htmltext) - query_dict.clear() - for k in parser.headitems['pyphant']: - query_dict[k] =\ - parser.headitems['pyphant'][k].strip() - query_dict['lastkmidindex']\ - = int(query_dict['lastkmidindex']) - logger.debug("Code 404 from '%s', updated query: %s", - km_url, str(query_dict)) - else: - # message for everyone: do not ask this KM again - query_dict['lastkmidindex'] += 1 - query_dict['kmid%d' % (query_dict['lastkmidindex'], )]\ - = km_id - except: - logger.debug("Could not contact KM with ID '%s'", km_id) - # message for everyone: do not ask this KM again - query_dict['lastkmidindex'] += 1 - query_dict['kmid%d' % (query_dict['lastkmidindex'], )]\ - = km_id - finally: - answer.close() - return dc_url - - def _getDataContainerURL(self, query_dict): - """ - Returns a URL from which a DataContainer can be downloaded. - The server must be running before calling this method. - query_dict -- dict of DC ID to get and KnowledgeManager IDs - which shouldn't be asked. - e.g.: {'dcid':'somedcid', - 'lastkmidindex:1', - 'kmid0':'someid', - 'kmid1':'anotherid'} - query_dict is extended by this method in order to - exclude KMs recursively. 
- """ - assert self.isServerRunning(), "Server is not running." - dc_id = query_dict['dcid'] - if self.hasDataContainer(dc_id): - dc = self.getDataContainer(dc_id, True, False) - # Wrap data container in temporary HDF5 file - osFileId, filename = tempfile.mkstemp(suffix = '.h5', - prefix = 'dcrequest-', - dir = self._http_dir) - os.close(osFileId) - handler = H5FileHandler(filename, 'w') - with handler: - handler.saveDataContainer(dc) - dc_url = self._getServerURL() + "/" + os.path.basename(filename) - else: - try: - dc_url = self._getDCURLFromRemoteKMs(query_dict) - except Exception, excep: - raise KnowledgeManagerException( - "URL for DC ID '%s' not found." % (dc_id, ), excep) - return dc_url - def getDCFromCache(self, dc_id, filename): """ Returns a DC instance from cache or local storage. @@ -606,7 +329,7 @@ self._cache.append(cache_item) self._cache_size += cache_item.size - def getDataContainer(self, dc_id, use_cache = True, try_remote = True): + def getDataContainer(self, dc_id, use_cache=True, try_remote=True): """ Returns DataContainer matching the given id. dc_id -- Unique ID of the DataContainer (emd5) @@ -626,21 +349,14 @@ with self.getH5FileHandler(filename) as handler: dc = handler.loadDataContainer(dc_id) return dc - elif try_remote: - dc_url = self._getDCURLFromRemoteKMs({'dcid':dc_id, - 'lastkmidindex':-1}) - if dc_url == None: - raise KnowledgeManagerException("DC ID '%s' is unknown." - % (dc_id,)) - filename = getFilenameFromDcId(dc_id) - urllib.urlretrieve(dc_url, filename) - self.registerH5(filename) - with self.getH5FileHandler(filename) as handler: - dc = handler.loadDataContainer(dc_id) - return dc - else: - raise KnowledgeManagerException("DC ID '%s' is unknown." - % (dc_id, )) + elif try_remote and self.node != None: + try: + return self.node.get_datacontainer(dc_id) + except DCNotFoundError: + pass + msg = "Could not find DC with id '%s'." 
% dc_id + self.logger.error(msg) + raise DCNotFoundError(msg) def getEmd5List(self): """ @@ -652,7 +368,52 @@ def search(self, result_keys, search_dict={}, order_by=None, order_asc=True, limit=-1, offset=0, distinct=False): """ - See SQLiteWrapper.get_andsearch_result() + returns a list of tuples filled with values of the result keys + matching the constraints of search_dict. + Arguments: + - result_keys: List (of length >= 1) of keys to include in the + result tuples. + - search_dict: Dict mapping keys to constraint values. + Use empty dict for no constraints at all + possible keys: values (used relational operator[, type constraint]): + 'longname': str types (==) + 'shortname': str types (==) + 'machine': str types (==) + 'creator: str types (==) + 'date_from:' str types: + YYYY[-MM[-DD[_hh:[mm:[ss[.s[s[s[s[s[s]]]]]]]]]]] (>=) + 'date_to:' str types: + YYYY[-MM[-DD[_hh:[mm:[ss[.s[s[s[s[s[s]]]]]]]]]]] (<) + 'hash': str types (==) + 'id': str types: emd5 (==) + 'type': 'field' or 'sample' (==) + 'attributes': dict mapping attr. key to attr. value (==) + use (SQLiteWrapper instance).any_value + or (KM instance).any_value to skip value check + 'storage': str types (==) + 'unit': PhysicalUnit or number or PhysicalQuantity (==, FC only) + 'dimensions': list of FC search dicts + (see above definitions, FC only) + 'columns': list of FC search dicts (see above definitions, SC only) + - order_by: element of result_keys to order the results by + or None for no special ordering + - order_asc: whether to order ascending + - limit: maximum number of results to return, + set to -1 for no limit, default: -1 + - offset: number of search results to skip, default: 0 + - distinct: flag that indicates whether the result list + should only contain distinct tuples. + Usage Examples: + Get list of all longnames: + get_andsearch_result(['longname'], distinct=True) + --> [('name1', ), ('name2', ), ...] 
+ Get id and shortname of all FCs that are parametrized by + a time dimension along the primary axis: + tunit = PhysicalQuantity(1, 's') + get_andsearch_result(['id', 'shortname'], + {'type':'field', + 'dimensions':[{'unit':tunit}]}) + --> [('emd5_1', 'name_1'), ('emd5_2', 'name_2'), ...] """ with SQLiteWrapper(self.dbase) as wrapper: return wrapper.get_andsearch_result( @@ -665,7 +426,6 @@ the given DC. """ with SQLiteWrapper(self.dbase) as wrapper: - # TODO: usage of rowwrapper is not optimal in performance rowwrapper = wrapper[dc_id] keys = list(SQLiteWrapper.all_keys) if dc_id.endswith('field'): @@ -675,184 +435,3 @@ keys.remove('dimensions') summary = dict([(key, rowwrapper[key]) for key in keys]) return summary - - -class KMHTTPRequestHandler(SimpleHTTPRequestHandler): - """ - Helper class for KnowledgeManager that handles HTTP requests. - """ - _km = KnowledgeManager.getInstance() - _logger = logging.getLogger("pyphant") - def send_response(self, code, message = None): - """ - Sends HTTP status code and an optional message via HTTP headers. - code -- HTTP status code e.g. 404: File not found - message -- optional reason for the given code - """ - self.log_request(code) - if message is None: - if code in self.responses: - message = self.responses[code][0] - else: - message = '' - if self.request_version != 'HTTP/0.9': - self.wfile.write("%s %d %s\r\n" % (self.protocol_version, code, - message)) - self.send_header('Server', self.version_string()) - self.send_header('Date', self.date_time_string()) - #for older versions of urllib.urlopen which do not support - #.getcode() method - self.send_header('code', str(code)) - - def do_POST(self): - """ - Handles HTTP POST requests. 
- """ - self._logger.debug("POST request from client (host,port): %s", - self.client_address) - self._logger.debug("POST request path: %s", self.path) - if self.path == HTTP_REQUEST_DC_URL_PATH: - httpanswer = self._do_POST_request_dc_url() - elif self.path == HTTP_REQUEST_KM_ID_PATH: - httpanswer = self._do_POST_request_km_id() - elif self.path == HTTP_REQUEST_REGISTER_KM: - httpanswer = self._do_POST_request_register_km() - else: - code = 400 - message = "Unknown request path '%s'." % (self.path, ) - httpanswer = HTTPAnswer(code, message) - httpanswer.sendTo(self) - - def _do_POST_request_km_id(self): - """ - Returns the KnowledgeManager ID via HTTP in the HTML head as - "<pyphant kmid = '...'>". - """ - km = KMHTTPRequestHandler._km - if self.headers.has_key('content-length'): - length = int( self.headers['content-length'] ) - query = self.rfile.read(length) - query_dict = cgi.parse_qs(query) - remote_host = '' - remote_port = '' - try: - remote_host = query_dict['kmhost'][0] - remote_port = query_dict['kmport'][0] - except: - #self._logger.info("Remote knowledge is not being shared.") - pass - if remote_host != '' and remote_port != '': - km.registerKnowledgeManager(remote_host, int(remote_port), - False) - self._logger.debug("Returning ID '%s'...", km.getServerId()) - return km.web_interface.get_kmid(km.getServerId()) - - def _do_POST_request_dc_url(self): - """ - Returns an URL for a given DataContainer ID via HTTP in the HTML head as - "<pyphant hdf5url = '...'>". 
- """ - if self.headers.has_key('content-length'): - length = int( self.headers['content-length'] ) - query = self.rfile.read(length) - tmp_dict = cgi.parse_qs(query) - query_dict = dict([(k, v[0]) for (k, v) in tmp_dict.items()\ - if (k.startswith('kmid')\ - or k == 'lastkmidindex' or k =='dcid')]) - query_dict['lastkmidindex'] = int(query_dict['lastkmidindex']) - self._logger.debug("Query dict: %s", str(query_dict)) - try: - km = KMHTTPRequestHandler._km - redirect_url = km._getDataContainerURL(query_dict) - if redirect_url != None: - self._logger.debug("Returning URL '%s'...", redirect_url) - httpanswer = km.web_interface.get_kmurl(redirect_url, - query_dict['dcid']) - else: - self._logger.debug("Returning Error Code 404: \ -DataContainer ID '%s' not found.", query_dict['dcid']) - httpanswer = km.web_interface.get_updatequery(query_dict) - except Exception, excep: - self._logger.warn("Caught exception: %s", excep.message) - httpanswer = km.web_interface.get_internalerror(excep) - else: - httpanswer = HTTPAnswer(400, "Cannot interpret query.") - return httpanswer - - def _do_POST_request_register_km(self): - length = int( self.headers['content-length'] ) - query = self.rfile.read(length) - dict = cgi.parse_qs(query) - km = KMHTTPRequestHandler._km - host = dict['remote_km_host'][0] - port = int(dict['remote_km_port'][0]) - try: - km.registerKnowledgeManager(host, port, False) - except KnowledgeManagerException: - emsg = "Host '%s:%d' is not a KnowledgeManager." - return HTTPAnswer(400, emsg % (host, port)) - return km.web_interface.get_frontpage("/") - - def do_GET(self): - """ - Returns a requested HDF5 from temporary directory or the web frontend - if the given request path was located outside the servers tmp directory. 
- """ - log = self._logger - km = KMHTTPRequestHandler._km - if self.path == '/' or self.path.startswith('/../') or \ - self.path.startswith('/?'): - km.web_interface.get_frontpage(self.path).sendTo(self) - elif self.path.startswith(HTTP_REQUEST_DC_DETAILS_PATH): - km.web_interface.get_details(self.path).sendTo(self) - elif self.path.startswith(HTTP_REQUEST_SEARCH): - km.web_interface.get_search(self.path).sendTo(self) - else: - f = self.send_head() - if f: - self.copyfile(f, self.wfile) - f.close() - try: - log.debug("Trying to remove temporary file '%s'..", f.name) - os.remove(f.name) - except Exception: - log.warn("Cannot delete temporary file '%s'.", f.name) - - def send_head(self): # see SimpleHTTPServer.SimpleHTTPRequestHandler - """ - Sends HTTP headers for HDF5 file requests. - """ - log = self._logger - km = KMHTTPRequestHandler._km - source_dir = km._http_dir # this is intended - log.debug("HTTP GET request: Reading files from directory '%s'..", - source_dir) - try: - # build filename, remove preceding '/' in path - filename = os.path.join(source_dir, self.path[1:]) - log.debug("Returning file '%s' as answer for HTTP request..", - filename) - f = open(filename, 'rb') - except IOError: - self.send_error(404, "File not found") - return None - self.send_response(200) - #self.send_header("Content-type", "application/x-hdf") - fs = os.fstat(f.fileno()) - self.send_header("Content-Length", str(fs[6])) - self.send_header("Last-Modified", self.date_time_string(fs.st_mtime)) - self.end_headers() - return f - - -def _enableLogging(): - """ - Enables logging to stdout for debug purposes. 
- """ - l = logging.getLogger("pyphant") - l.setLevel(logging.DEBUG) - f = logging.Formatter('%(asctime)s [%(name)s|%(levelname)s] %(message)s') - h = logging.StreamHandler(sys.stderr) - h.setFormatter(f) - l.addHandler(h) - l.info("Logger 'pyphant' has been configured for debug purposes.") Added: trunk/src/pyphant/pyphant/core/KnowledgeNode.py =================================================================== --- trunk/src/pyphant/pyphant/core/KnowledgeNode.py (rev 0) +++ trunk/src/pyphant/pyphant/core/KnowledgeNode.py 2010-02-01 17:45:00 UTC (rev 663) @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2008, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +from __future__ import with_statement + +""" +This module provides the KnowledgeNode class which is used as an +HTTP communication channel between one local KnowledgeManager and +arbitrary many remote KnowledgeManagers. It comes with a RoutingHTTPServer +and an optional WebInterface. +""" + +__id__ = "$Id$" +__author__ = "$Author$" +__version__ = "$Revision$" +# $Source: $ + +from pyphant.core.RoutingHTTPServer import (RoutingHTTPServer, + UnreachableError) +import sqlite3 +from pyphant.core.Helpers import getPyphantPath +from pyphant.core.SQLiteWrapper import create_table +from time import time +from urllib2 import (urlopen, URLError, HTTPError) +from urllib import urlencode +import logging +from pyphant.core.KnowledgeManager import (DCNotFoundError, KnowledgeManager) +from pyphant.core.bottle import (request, send_file) +try: + from json import (dumps, load, loads) +except ImportError: + from simplejson import (dumps, load, loads) +from tempfile import (mkdtemp, mkstemp) +import os +from pyphant import __path__ as pyphant_source_path +import pyphant.core.bottle + + +class SkipError(Exception): + pass + + +class RemoteError(Exception): + pass + + +class RemoteKN(object): + """ + This class represents a remote KnowledgeNode. 
+ """ + + status_dict = {0:'offline', 1:'online', 2:'disabled'} + + def __init__(self, host, port, status=1, timeout=300.0): + """ + Arguments: + - `host`: hostname + - `port`: port + - `status`: 0: offline, may get online after timeout + 1: online, may get offline anytime + 2: disabled, use enable() to enable + - `timeout`: refresh interval for status lookup when offline + default: 5 min + """ + self.host = host + self.port = port + self.url = "http://%s:%d/" % (host, port) + self.timeout = timeout + self.last_update = None + self.uuid = None + self._status = status + self.logger = logging.getLogger('pyphant') + self.update_status() + + def __eq__(self, other): + if not isinstance(other, RemoteKN): + return False + else: + return self.host == other.host and self.port == other.port + + def _get_status(self): + return self.status_dict[self._status] + status = property(_get_status) + + def enable(self): + self._status = 0 + self.update_status() + + def disable(self): + self.last_update = None + self.uuid = None + self._status = 2 + + def update_status(self): + if self._status == 2: + return + elif self.last_update == None or self._status == 1: + self.connect() + else: + if time() - self.last_update > self.timeout: + self.connect() + + def connect(self): + stream = None + try: + try: + stream = urlopen(self.url + 'uuid/', timeout=3.0) + except TypeError: + stream = urlopen(self.url + 'uuid/') + line = stream.readline() + if line.startswith('urn:uuid:'): + self._status = 1 + self.uuid = line + else: + self._status = 0 + self.logger.error("Remote KM '%s' returned broken uuid: '%s'" \ + % (self.url, line)) + except (URLError, IOError, HTTPError): + self._status = 0 + self.logger.warn("Remote KM '%s' is not responding." 
% self.url) + finally: + if stream != None: + stream.close() + self.last_update = time() + + def get_datacontainer_url(self, dc_id, skip): + self.update_status() + if self._status == 1: + if self.uuid in skip: + raise SkipError() + else: + try: + query = urlencode({'skip':dumps(skip), 'dc_id':dc_id}) + url = '%sget_dc_url/?%s' % (self.url, query) + try: + stream = urlopen(url, timeout=60.0) + except TypeError: + stream = urlopen(url) + assert stream.headers.type == 'application/json' + answer = load(stream) + stream.close() + if answer['dc_url'] == None: + raise DCNotFoundError + assert len(answer['skip']) >= len(skip) + return answer['dc_url'], answer['skip'] + except (URLError, HTTPError, IOError, AssertionError): + raise UnreachableError() + else: + raise UnreachableError() + + +class KnowledgeNode(RoutingHTTPServer): + """ + This class manages communication between one local and arbitrary many + remote KM instances. + """ + + def __init__(self, local_km=None, + host=u'127.0.0.1', port=8080, start=False, + web_interface=False, dbase=u'default'): + """ + Arguments: + - `local_km`: Local KnowledgeManager instance to hook up to. + If set to `None`, KnowledgeManager.getInstance() is used. + - `host`: hostname to listen on + - `port`: port to listen on + - `start`: flag that indicates whether to start the server + - `web_interface`: flag that indicates whether to enable + the web interface. You can enable/disable it anytime by + setting (KN instance).web_interface.enabled to `True`/`False`. 
+ - `dbase`: leave this to 'default', other values are allowed for + debug purposes + """ + RoutingHTTPServer.__init__(self, host, port, start) + if local_km == None: + local_km = KnowledgeManager.getInstance() + self.km = local_km + self.remotes = [] + if dbase == u'default': + self._dbase = getPyphantPath('/sqlite3/') + 'kn_remotes.sqlite3' + else: + self._dbase = dbase + self._restore_remotes() + self._setup_routes() + self._tempdir = mkdtemp(prefix = 'HDF5Wrap') + tpl_path = pyphant_source_path[0] + '/web/templates/' + if not tpl_path in pyphant.core.bottle.TEMPLATE_PATH: + pyphant.core.bottle.TEMPLATE_PATH.append(tpl_path) + from pyphant.core.WebInterface import WebInterface + self.web_interface = WebInterface(self, web_interface) + self.km.node = self + + def _restore_remotes(self): + """ + Loads remotes from dbase. + """ + connection = sqlite3.connect(self._dbase) + cursor = connection.cursor() + try: + columns = [('host', 'TEXT'), ('port', 'INT'), ('status', 'INT'), + ('', 'UNIQUE(host, port)')] + create_table('kn_remotes', columns, cursor) + cursor.execute("SELECT * FROM kn_remotes") + self.remotes = [RemoteKN(host, port, status) \ + for host, port, status in cursor] + connection.commit() + finally: + cursor.close() + connection.close() + + def _setup_routes(self): + self.app.add_route('/uuid/', self.get_uuid) + self.app.add_route('/get_dc_url/', self.handle_datacontainer_url) + self.app.add_route(r'/wrapped/:filename#..*\.hdf$#', + self.handle_wrapped) + + def stop(self): + RoutingHTTPServer.stop(self) + if not hasattr(self, '_tempdir'): + return + if os.path.isdir(self._tempdir): + from shutil import rmtree + try: + rmtree(self._tempdir) + except OSError: + km.logger.warn("Could not delete '%s'." 
% self._tempdir) + + def register_remote(self, host, port): + host = host.lower() + port = int(port) + connection = sqlite3.connect(self._dbase) + cursor = connection.cursor() + error = None + try: + try: + cursor.execute("INSERT OR ABORT INTO kn_remotes "\ + "(host, port, status) "\ + "VALUES (?, ?, ?)", (host, port, 0)) + self.remotes.append(RemoteKN(host, port)) + except sqlite3.IntegrityError: + error = RemoteError("Remote '%s:%d' already registered." \ + % (host, port)) + connection.commit() + finally: + cursor.close() + connection.close() + if not error is None: + raise error + + def remove_remote(self, host, port): + host = host.lower() + port = int(port) + dummy = RemoteKN(host, port, status=2) + try: + self.remotes.remove(dummy) + except ValueError: + raise RemoteError("Remote '%s:%d' is not registered." \ + % (host, port)) + connection = sqlite3.connect(self._dbase) + cursor = connection.cursor() + try: + cursor.execute("DELETE FROM kn_remotes "\ + "WHERE host=? AND port=?", (host, port)) + connection.commit() + finally: + cursor.close() + connection.close() + + def change_remote(self, host, port, status): + host = host.lower() + port = int(port) + dummy = RemoteKN(host, port, status=2) + for rem in self.remotes: + if rem == dummy and rem._status != status: + if status == 2: + rem.disable() + else: + rem.enable() + connection = sqlite3.connect(self._dbase) + cursor = connection.cursor() + try: + cursor.execute("UPDATE kn_remotes SET status=? "\ + "WHERE host=? AND port=?", + (status, host, port)) + connection.commit() + finally: + cursor.close() + connection.close() + return + raise RemoteError("Remote '%s:%d' is not registered." 
\ + % (host, port)) + + def disable_remote(self, host, port): + self.change_remote(host, port, 2) + + def enable_remote(self, host, port): + self.change_remote(host, port, 0) + + def get_uuid(self): + return self.km.uuid + uuid = property(get_uuid) + + def get_datacontainer(self, dc_id): + skip = [self.uuid] + for remote in self.remotes: + try: + dc_url, skip = remote.get_datacontainer_url(dc_id, skip) + self.km.registerURL(dc_url) + return self.km.getDataContainer(dc_id) + except (DCNotFoundError, UnreachableError, SkipError): + pass + raise DCNotFoundError() + + def handle_datacontainer_url(self): + query = request.GET + skip = loads(query['skip']) + if self.uuid in skip: + # This should not happen during normal operation + self.km.logger.error( + "KN '%s' has been queried although it is in the skip list.") + else: + skip.append(self.uuid) + dc_id = query['dc_id'] + try: + dc = self.km.getDataContainer(dc_id, try_remote=False) + # Wrap data container in temporary HDF5 file + osFileId, filename = mkstemp(suffix='.hdf', + prefix='dcrequest-', + dir=self._tempdir) + os.close(osFileId) + handler = self.km.getH5FileHandler(filename, 'w') + with handler: + handler.saveDataContainer(dc) + dc_url = self.url + "wrapped/" + os.path.basename(filename) + except DCNotFoundError: + dc_url = None + for remote in self.remotes: + try: + dc_url, skip = remote.get_datacontainer_url(dc_id, skip) + break + except (DCNotFoundError, UnreachableError, SkipError): + pass + return {'skip':skip, 'dc_url':dc_url} + + def handle_wrapped(self, filename): + send_file(filename, self._tempdir, + guessmime=False, mimetype='application/x-hdf') + + +def get_kn_autoport(ports, logger=None, *args, **kargs): + """ + Returns a KnowledgeNode listening on the first free port in `ports` + messages are logged to `logger` or stdout if `None` + If no port is free, a socket.error (no. 98) is raised. 
+ """ + import socket + def log(text): + if logger is None: + print text + else: + logger.warn(text) + + last_error = None + for port in ports: + try: + kn = KnowledgeNode(port=port, *args, **kargs) + return kn + except socket.error, err: + last_error = err + try: + #Python 2.6 + eno = err.errno + except AttributeError: + #Python 2.5 + eno = err.args[0] + from errno import (EADDRINUSE, EACCES) + if eno == EADDRINUSE: + log("Port %d is already in use." % port) + elif eno == EACCES: + log("Port %d: Permission denied." % port) + else: + raise err + raise last_error Modified: trunk/src/pyphant/pyphant/core/PyTablesPersister.py =================================================================== --- trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2010-01-11 21:39:34 UTC (rev 662) +++ trunk/src/pyphant/pyphant/core/PyTablesPersister.py 2010-02-01 17:45:00 UTC (rev 663) @@ -105,20 +105,20 @@ input.flush() orderGroup._v_attrs.resultPlug = order[1] -def saveRecipeToHDF5File( recipe, filename ): +def saveRecipeToHDF5File(recipe, filename, saveResults=True): _logger.info( "Saving to %s" % filename ) h5 = tables.openFile(filename, 'w') recipeGroup = h5.createGroup("/", "recipe") resultsGroup = h5.createGroup("/", "results") workers=recipe.getWorkers() for worker in workers: - saveWorker(h5, recipeGroup, worker) + saveWorker(h5, recipeGroup, worker, saveResults) h5.close() -def saveWorker(h5, recipeGroup, worker): +def saveWorker(h5, recipeGroup, worker, saveResults=True): workerGroup = h5.createGroup(recipeGroup, "worker_"+str(hash(worker))) saveBaseAttributes(h5, workerGroup, worker) - savePlugs(h5, workerGroup, worker) + savePlugs(h5, workerGroup, worker, saveResults) saveParameters(h5, workerGroup, worker) def saveParameters(h5, workerGroup, worker): @@ -126,11 +126,11 @@ for (paramName, param) in worker._params.iteritems(): h5.setNodeAttr(paramGroup, paramName, param.value) -def savePlugs(h5, workerGroup, worker): +def savePlugs(h5, workerGroup, worker, 
saveResults=True): plugs = h5.createGroup(workerGroup, "plugs") for (plugName, plug) in worker._plugs.iteritems(): plugGroup = h5.createGroup(plugs, plugName) - if plug.resultIsAvailable(): + if plug.resultIsAvailable() and saveResults: resId = saveResult(plug._result, h5) h5.setNodeAttr(plugGroup, "result", resId) connectionTable = h5.createTable(plugGroup, 'connections', Connection, expectedrows=len(plug._sockets)) Added: trunk/src/pyphant/pyphant/core/RoutingHTTPServer.py =================================================================== --- trunk/src/pyphant/pyphant/core/RoutingHTTPServer.py (rev 0) +++ trunk/src/pyphant/pyphant/core/RoutingHTTPServer.py 2010-02-01 17:45:00 UTC (rev 663) @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2006-2008, Rectorate of the University of Freiburg +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that ... [truncated message content] |
From: <zk...@us...> - 2010-03-14 14:26:13
|
Revision: 671 http://pyphant.svn.sourceforge.net/pyphant/?rev=671&view=rev Author: zklaus Date: 2010-03-14 14:26:07 +0000 (Sun, 14 Mar 2010) Log Message: ----------- Merge branch 'master' into svn-trunk * master: Cosm: Nicer code formatting Fix: Removed deficient ImageSaver visualizer Fix: Removed inadequate batch workers Modified Paths: -------------- trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py trunk/src/workers/tools/tools/__init__.py Modified: trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py =================================================================== --- trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py 2010-02-25 17:31:27 UTC (rev 670) +++ trunk/src/pyphant/pyphant/visualizers/ImageVisualizer.py 2010-03-14 14:26:07 UTC (rev 671) @@ -152,17 +152,5 @@ pylab.ion() pylab.show() -class ImageSaver(object): - name = 'Save Greyscale Image' - def __init__(self, fieldContainer, show=True): - self.fieldContainer = fieldContainer - self.show = show - #testing only: - print("Enter filename: ") - filename = raw_input() - if filename != "": - scipy.misc.imsave('/Users/aheld/CiSE/series/output/' + filename, - fieldContainer.data) DataVisReg.getInstance().registerVisualizer(TYPE_IMAGE, ImageVisualizer) -DataVisReg.getInstance().registerVisualizer(TYPE_IMAGE, ImageSaver) Modified: trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py =================================================================== --- trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py 2010-02-25 17:31:27 UTC (rev 670) +++ trunk/src/pyphant/pyphant/wxgui2/wxPyphantApplication.py 2010-03-14 14:26:07 UTC (rev 671) @@ -46,7 +46,8 @@ logging.basicConfig(level=logging.DEBUG, filename=os.path.join(LOGDIR, u'pyphant.log'), filemode='w', - format="%(asctime)s - %(levelname)s:%(name)s:%(thread)d:%(module)s.%(funcName)s(l %(lineno)d):%(message)s") + format="%(asctime)s - %(levelname)s:%(name)s:%(thread)"\ + 
"d:%(module)s.%(funcName)s(l %(lineno)d):%(message)s") console = logging.StreamHandler() console.setLevel(logging.WARNING) logging.getLogger('').addHandler(console) @@ -63,6 +64,7 @@ import webbrowser pltform = platform.system() + class wxPyphantApplication(wx.PySimpleApp): def __init__(self, pathToRecipe=None): self.pathToRecipe = pathToRecipe @@ -71,12 +73,12 @@ def OnInit(self): if not wx.PySimpleApp.OnInit(self): return False - self._logger=logging.getLogger("pyphant") + self._logger = logging.getLogger("pyphant") self._excframe = wx.Frame(None, -2, "") sys.excepthook = self.excepthook sogl.SOGLInitialize() self._knowledgeNode = None - self._paramVisReg=ParamVisReg.ParamVisReg() + self._paramVisReg = ParamVisReg.ParamVisReg() self._frame = wxPyphantFrame(self) self._frame.Show() return True @@ -100,7 +102,9 @@ return self._frame def configureWorker(self, worker): - configureFrame=ConfigureFrame.ConfigureFrame(self._frame, self._paramVisReg, worker) + configureFrame = ConfigureFrame.ConfigureFrame(self._frame, + self._paramVisReg, + worker) if configureFrame.ShowModal() == wx.ID_OK: configureFrame.applyAll() @@ -110,40 +114,38 @@ class wxPyphantFrame(wx.Frame): - ID_WINDOW_TOP=100 - ID_WINDOW_LEFT=101 - ID_WINDOW_RIGHT=102 - ID_WINDOW_BOTTOM=103 + ID_WINDOW_TOP = 100 + ID_WINDOW_LEFT = 101 + ID_WINDOW_RIGHT = 102 + ID_WINDOW_BOTTOM = 103 ID_CLOSE_COMPOSITE_WORKER = wx.NewId() ID_UPDATE_PYPHANT = wx.NewId() def __init__(self, _wxPyphantApp): - wx.Frame.__init__(self, None, -1, "wxPyphant %s" % __version__, size=(640,480)) + wx.Frame.__init__(self, None, -1, "wxPyphant %s" % __version__, + size=(640,480)) import PyphantCanvas - self._statusBar=self.CreateStatusBar() - self._wxPyphantApp=_wxPyphantApp + self._statusBar = self.CreateStatusBar() + self._wxPyphantApp = _wxPyphantApp self._initMenuBar() self._initSash() - self.recipeState=None + self.recipeState = None self.onOpenCompositeWorker(None) self._workerRepository.Bind(wx.EVT_SASH_DRAGGED_RANGE, 
self.onFoldPanelBarDrag, id=self.ID_WINDOW_TOP, id2=self.ID_WINDOW_BOTTOM) self.Bind(wx.EVT_SIZE, self.onSize) - self.compositeWorkerStack=[] + self.compositeWorkerStack = [] wx.MessageBox("Located log directory at %s.\n" "Logging will go to %s." % (LOGDIR, os.path.join(LOGDIR, 'pyphant.log')), "Logging info") def _initSash(self): - self._workerRepository = wx.SashLayoutWindow(self, - self.ID_WINDOW_RIGHT, - wx.DefaultPosition, - wx.Size(200,1000), - wx.NO_BORDER |wx.SW_3D - | wx.CLIP_CHILDREN) + self._workerRepository = wx.SashLayoutWindow( + self, self.ID_WINDOW_RIGHT, wx.DefaultPosition, wx.Size(200,1000), + wx.NO_BORDER | wx.SW_3D | wx.CLIP_CHILDREN) self._workerRepository.SetDefaultSize(wx.Size(220,1000)) self._workerRepository.SetOrientation(wx.LAYOUT_VERTICAL) self._workerRepository.SetAlignment(wx.LAYOUT_RIGHT) @@ -159,7 +161,8 @@ if event.GetDragStatus() == wx.SASH_STATUS_OUT_OF_RANGE: return if event.GetId() == self.ID_WINDOW_RIGHT: - self._workerRepository.SetDefaultSize(wx.Size(event.GetDragRect().width, 1000)) + self._workerRepository.SetDefaultSize( + wx.Size(event.GetDragRect().width, 1000)) # Leaves bits of itself behind sometimes wx.LayoutAlgorithm().LayoutWindow(self, self._remainingSpace) self._remainingSpace.Refresh() @@ -168,58 +171,58 @@ def onOpenCompositeWorker(self, event): if not self._wxPyphantApp.pathToRecipe: if pltform == 'Linux' or pltform == 'Darwin': - osMessage = "Choose an existing recipe or cancel to create a new recipe" - elif pltform=='Windows': - osMessage = "Choose existing recipe to open or name a new recipe to create" + osMessage = "Choose an existing recipe or cancel to create "\ + "a new recipe" + elif pltform == 'Windows': + osMessage = "Choose existing recipe to open or name a new "\ + "recipe to create" else: raise OSError, "Operating System %s not supported!" 
% pltform wc = "Pyphant Recipe(*.h5)|*.h5" - dlg = wx.FileDialog( self, message=osMessage, defaultDir=os.getcwd(), - defaultFile="", wildcard=wc, style=wx.OPEN) + dlg = wx.FileDialog(self, message=osMessage, defaultDir=os.getcwd(), + defaultFile="", wildcard=wc, style=wx.OPEN) if dlg.ShowModal() == wx.ID_OK: self._wxPyphantApp.pathToRecipe = dlg.GetPath() else: dlg.Destroy() - dlg = wx.FileDialog( self, message='Create a new recipe', defaultDir=os.getcwd(), - defaultFile="", wildcard=wc, style=wx.SAVE) + dlg = wx.FileDialog(self, message='Create a new recipe', + defaultDir=os.getcwd(), defaultFile="", + wildcard=wc, style=wx.SAVE) if dlg.ShowModal() == wx.ID_OK: path = dlg.GetPath() - if not path[:-3]=='.h5': - path+='.h5' + if not path[:-3] == '.h5': + path += '.h5' self._wxPyphantApp.pathToRecipe = path dlg.Destroy() - import PyphantCanvas if self._wxPyphantApp.pathToRecipe[-3:] == '.h5': if os.path.exists(self._wxPyphantApp.pathToRecipe): - recipe = pyphant.core.PyTablesPersister.loadRecipeFromHDF5File(self._wxPyphantApp.pathToRecipe) - #from pyphant.core import KnowledgeManager - #KnowledgeManager.KnowledgeManager.getInstance().registerURL( - # "file:///"+os.path.realpath(self._wxPyphantApp.pathToRecipe) - # ) - self._remainingSpace=PyphantCanvas.PyphantCanvas(self, recipe) + recipe = pyphant.core.PyTablesPersister.loadRecipeFromHDF5File( + self._wxPyphantApp.pathToRecipe) + self._remainingSpace = PyphantCanvas.PyphantCanvas(self, recipe) else: - self._remainingSpace=PyphantCanvas.PyphantCanvas(self) + self._remainingSpace = PyphantCanvas.PyphantCanvas(self) else: - raise IOError("Unknown file format in file \""+self._wxPyphantApp.pathToRecipe+"\"") - self.recipeState='clean' + raise IOError("Unknown file format in file \"%\""\ + % self._wxPyphantApp.pathToRecipe) + self.recipeState = 'clean' self._remainingSpace.diagram.recipe.registerListener(self.recipeChanged) def recipeChanged(self, event): - self.recipeState='dirty' + self.recipeState = 'dirty' def 
onSaveCompositeWorker(self, event=None): pyphant.core.PyTablesPersister.saveRecipeToHDF5File( self._remainingSpace.diagram.recipe, self._wxPyphantApp.pathToRecipe, self._fileMenu.IsChecked(wx.ID_FILE4)) - self.recipeState='clean' + self.recipeState = 'clean' def onSaveAsCompositeWorker(self, event=None): msg = "Select file to save recipe." wc = "Pyphant recipe (*.h5)|*.h5" - dlg = wx.FileDialog(self, message = msg, defaultDir = os.getcwd(), - defaultFile = "", wildcard = wc, style = wx.SAVE) + dlg = wx.FileDialog(self, message=msg, defaultDir=os.getcwd(), + defaultFile="", wildcard=wc, style=wx.SAVE) if dlg.ShowModal() == wx.ID_OK: filename = dlg.GetPath() if not filename.endswith(".h5"): @@ -229,30 +232,32 @@ filename, self._fileMenu.IsChecked(wx.ID_FILE4)) self._wxPyphantApp.pathToRecipe = filename - self.recipeState='clean' + self.recipeState = 'clean' else: dlg.Destroy() def _initMenuBar(self): self._menuBar = wx.MenuBar() self._fileMenu = wx.Menu() - #self._fileMenu.Append( wx.ID_NEW, "&New\tCTRL+n") - #self._fileMenu.Append( wx.ID_OPEN, "&Open\tCTRL+o") + #self._fileMenu.Append(wx.ID_NEW, "&New\tCTRL+n") + #self._fileMenu.Append(wx.ID_OPEN, "&Open\tCTRL+o") self._fileMenu.AppendCheckItem(wx.ID_FILE4, "Save &results\tCTRL+r") self._fileMenu.Check(wx.ID_FILE4, True) - self._fileMenu.Append( wx.ID_SAVE, "&Save\tCTRL+s") - self._fileMenu.Append( wx.ID_SAVEAS, "Save &as\tCTRL+a") - self._fileMenu.Append( wx.ID_EXIT, "E&xit" ) - self._fileMenu.Append( wx.ID_FILE1, "Import HDF5 or FMF from &URL" ) - self._fileMenu.Append( wx.ID_FILE2, "&Import local HDF5 or FMF file") - self._fileMenu.Append( wx.ID_FILE3, "Start/pause sharing &knowledge") - self._menuBar.Append( self._fileMenu, "&File" ) + self._fileMenu.Append(wx.ID_SAVE, "&Save\tCTRL+s") + self._fileMenu.Append(wx.ID_SAVEAS, "Save &as\tCTRL+a") + self._fileMenu.Append(wx.ID_EXIT, "E&xit") + self._fileMenu.Append(wx.ID_FILE1, "Import HDF5 or FMF from &URL") + self._fileMenu.Append(wx.ID_FILE2, "&Import local HDF5 
or FMF file") + self._fileMenu.Append(wx.ID_FILE3, "Start/pause sharing &knowledge") + self._menuBar.Append(self._fileMenu, "&File") self._closeCompositeWorker = wx.Menu() - self._closeCompositeWorker.Append(self.ID_CLOSE_COMPOSITE_WORKER, "&Close Composite Worker") - self._menuBar.Append( self._closeCompositeWorker, "&Close Composite Worker") + self._closeCompositeWorker.Append(self.ID_CLOSE_COMPOSITE_WORKER, + "&Close Composite Worker") + self._menuBar.Append(self._closeCompositeWorker, + "&Close Composite Worker") self._updateMenu = self.createUpdateMenu() - self._menuBar.Append( self._updateMenu, "&Update") - self.SetMenuBar( self._menuBar ) + self._menuBar.Append(self._updateMenu, "&Update") + self.SetMenuBar(self._menuBar) self._menuBar.EnableTop(1, False) #self.Bind(wx.EVT_MENU, self.onCreateNew, id=wx.ID_NEW) #self.Bind(wx.EVT_MENU, self.onOpenCompositeWorker, id=wx.ID_OPEN) @@ -260,21 +265,23 @@ self.Bind(wx.EVT_MENU, self.onSaveAsCompositeWorker, id=wx.ID_SAVEAS) self.Bind(wx.EVT_CLOSE, self.onClose) self.Bind(wx.EVT_MENU, self.onQuit, id=wx.ID_EXIT) - self.Bind(wx.EVT_MENU, self.onCloseCompositeWorker, id=self.ID_CLOSE_COMPOSITE_WORKER) + self.Bind(wx.EVT_MENU, self.onCloseCompositeWorker, + id=self.ID_CLOSE_COMPOSITE_WORKER) self.Bind(wx.EVT_MENU, self.onImportURL, id=wx.ID_FILE1) self.Bind(wx.EVT_MENU, self.onImportLocal, id=wx.ID_FILE2) self.Bind(wx.EVT_MENU, self.onShare, id=wx.ID_FILE3) def createUpdateMenu(self): updateMenu = wx.Menu() - updateMenu.Append( self.ID_UPDATE_PYPHANT, "Update &Pyphant" ) + updateMenu.Append(self.ID_UPDATE_PYPHANT, "Update &Pyphant") self.Bind(wx.EVT_MENU, self.onUpdatePyphant, id=self.ID_UPDATE_PYPHANT) self.updateIds = { self.ID_UPDATE_PYPHANT : 'pyphant' } for toolbox in pkg_resources.iter_entry_points("pyphant.workers"): dist = toolbox.dist nId = wx.NewId() self.updateIds[nId] = dist.key - updateMenu.Append( nId, "Update %s (%s)" % (dist.project_name, dist.version) ) + updateMenu.Append(nId, "Update %s (%s)"\ + % 
(dist.project_name, dist.version)) self.Bind(wx.EVT_MENU, self.onUpdatePyphant, id=nId) return updateMenu @@ -286,11 +293,13 @@ self.Close() def onClose(self, event): - dlgid=None - if self.recipeState!='clean': + dlgid = None + if self.recipeState != 'clean': cpt = "Save changed recipe?" - msg = "The recipe has changed since the last saving.\nDo you want to save before terminating?" - dlg = wx.MessageDialog(self, msg, cpt, style=wx.YES|wx.NO|wx.CANCEL|wx.ICON_QUESTION) + msg = "The recipe has changed since the last saving.\n"\ + "Do you want to save before terminating?" + dlg = wx.MessageDialog( + self, msg, cpt, style=wx.YES|wx.NO|wx.CANCEL|wx.ICON_QUESTION) dlgid = dlg.ShowModal() if dlgid == wx.ID_YES: self.onSaveCompositeWorker() @@ -306,24 +315,25 @@ def editCompositeWorker(self, worker): import PyphantCanvas self.compositeWorkerStack.append(self._remainingSpace) - self._remainingSpace=PyphantCanvas.PyphantCanvas(self, worker) + self._remainingSpace = PyphantCanvas.PyphantCanvas(self, worker) self._remainingSpace.diagram.recipe.registerListener(self.recipeChanged) self._menuBar.EnableTop(1, True) def onCloseCompositeWorker(self, event): self._remainingSpace.Destroy() - self._remainingSpace=self.compositeWorkerStack.pop() - if len(self.compositeWorkerStack)==0: + self._remainingSpace = self.compositeWorkerStack.pop() + if len(self.compositeWorkerStack) == 0: self._menuBar.EnableTop(1, False) def onImportURL(self, event): cpt = "Import HDF5 or FMF from URL" - msg = "Enter an URL to a valid HDF5 or FMF file \ -(e.g. http://www.example.org/data.h5).\n\ -The file is stored permanently in your home directory in the \ -.pyphant directory\nand all DataContainers contained in that file are \ -available by using the\nEmd5Src Worker even after restarting wxPyphant.\n\ -HTTP redirects are resolved automatically, i.e. DOIs are supported as well." + msg = "Enter an URL to a valid HDF5 or FMF file "\ + "(e.g. 
http://www.example.org/data.h5).\n"\ + "The file is stored permanently in your home directory in the "\ + ".pyphant directory\nand all DataContainers contained in that "\ + "file are available by using the\nEmd5Src Worker even after "\ + "restarting wxPyphant.\nHTTP redirects are resolved "\ + "automatically, i.e. DOIs are supported as well." dlg = wx.TextEntryDialog(self, msg, cpt) dlgid = dlg.ShowModal() if dlgid != wx.ID_CANCEL: @@ -345,8 +355,8 @@ def onImportLocal(self, event): msg = "Select HDF5 or FMF file to import DataContainer(s) from." wc = "*.h5, *.hdf, *.hdf5, *.fmf|*.h5;*.hdf;*.hdf5;*.fmf" - dlg = wx.FileDialog(self, message = msg, defaultDir = os.getcwd(), - defaultFile = "", wildcard = wc, style = wx.OPEN) + dlg = wx.FileDialog(self, message=msg, defaultDir=os.getcwd(), + defaultFile="", wildcard=wc, style=wx.OPEN) if dlg.ShowModal() == wx.ID_OK: filename = dlg.GetPath() url = 'file://' + os.path.realpath(filename) @@ -358,8 +368,8 @@ km.registerURL(url) except Exception: cpt2 = "Error" - msg2 = "'%s' is not a valid HDF5 or FMF file.\n\ -(Tried to import from '%s')" % (filename, url) + msg2 = "'%s' is not a valid HDF5 or FMF file.\n"\ + "(Tried to import from '%s')" % (filename, url) finally: dlg2 = wx.MessageDialog(self, msg2, cpt2, wx.OK) dlgid2 = dlg2.ShowModal() Modified: trunk/src/workers/tools/tools/__init__.py =================================================================== --- trunk/src/workers/tools/tools/__init__.py 2010-02-25 17:31:27 UTC (rev 670) +++ trunk/src/workers/tools/tools/__init__.py 2010-03-14 14:26:07 UTC (rev 671) @@ -40,10 +40,9 @@ # $Source$ workers=[ - "Emd5Src", - "BatchHead", - "BatchTail", - "BatchExtractor", - "ParameterRun" + "Emd5Src"#, +# "BatchHead", +# "BatchTail", +# "BatchExtractor", +# "ParameterRun" ] - This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zk...@us...> - 2010-04-06 10:10:03
|
Revision: 674 http://pyphant.svn.sourceforge.net/pyphant/?rev=674&view=rev Author: zklaus Date: 2010-04-06 10:09:55 +0000 (Tue, 06 Apr 2010) Log Message: ----------- Merge branch 'master' into svn-trunk * master: Testcases for FMF version 1.1 introduced. TestFMFLoader checks physical constants being adapted to new CODATA recommendations. TestFMFLoader checks the correct interpretation of FMF 1.0 files. Enh. documentation of quantities and quantities.PhysicalQuantities modules. Included PhysicalQuantities module from revision 7dfc86beb3c71b25438bfb89b3e242bc9e3fca3c in order to distinguish between FMF 1.0 and FMF 1.1 files. Adapted units parsec and galUS to version 1.1 of FMF. Abbreviated definition of Hartree and added missing invcm. Renamed abbreviation of Avogadro number to NA. Changed abbreviation of unified atomic mass unit to 'u'. Added Rydberg constant. Corrected constants Fa, e, me, mp, Nav, and k due to CODATA-2006 recommendation. Bugfix: module PhysicalQuantities is now named quantities. Introduced Faraday constant Fa. Added Faraday's constant. Renamed abbreviation of the gravitational constant to 'G' and corrected the value with respect to the CODATA 2006 recommendation.
Modified Paths: -------------- trunk/src/pyphant/pyphant/core/DataContainer.py trunk/src/pyphant/pyphant/core/FieldContainer.py trunk/src/pyphant/pyphant/quantities/ParseQuantities.py trunk/src/pyphant/pyphant/quantities/__init__.py trunk/src/pyphant/pyphant/tests/TestParseQuantities.py trunk/src/workers/ImageProcessing/ImageProcessing/AutoFocus.py trunk/src/workers/OSC/OSC/CompareFields.py trunk/src/workers/OSC/OSC/ComputeFunctional.py trunk/src/workers/OSC/OSC/ErrorEstimator.py trunk/src/workers/OSC/OSC/EstimateParameter.py trunk/src/workers/OSC/OSC/ExtremumFinder.py trunk/src/workers/OSC/OSC/OscCurrent.py trunk/src/workers/OSC/OSC/Smoother.py trunk/src/workers/OSC/OSC/ThicknessModeller.py trunk/src/workers/fmfile/fmfile/FMFLoader.py trunk/src/workers/fmfile/fmfile/tests/TestFMFLoader.py Added Paths: ----------- trunk/src/pyphant/pyphant/quantities/PhysicalQuantities.py Modified: trunk/src/pyphant/pyphant/core/DataContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/DataContainer.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/pyphant/pyphant/core/DataContainer.py 2010-04-06 10:09:55 UTC (rev 674) @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Copyright (c) 2006-2009, Rectorate of the University of Freiburg -# Copyright (c) 2009, Andreas W. Liehr (li...@us...) +# Copyright (c) 2009-2010, Andreas W. Liehr (li...@us...) # All rights reserved. # # Redistribution and use in source and binary forms, with or without @@ -196,7 +196,7 @@ \t .data \t- Table of samples stored in a numpy.ndarray. \t .desc \t- Description numpy.dtype of the ndarray. -\t .units \t- List of quantities.objects denoting the units of +\t .units \t- List of quantities objects denoting the units of \t\t\t the columns. \t .longname \t- Notation of the data, e.g. 'database query', \t\t\t which is used for the automatic annotation of charts. 
Modified: trunk/src/pyphant/pyphant/core/FieldContainer.py =================================================================== --- trunk/src/pyphant/pyphant/core/FieldContainer.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/pyphant/pyphant/core/FieldContainer.py 2010-04-06 10:09:55 UTC (rev 674) @@ -2,7 +2,7 @@ from __future__ import with_statement # Copyright (c) 2006-2009, Rectorate of the University of Freiburg -# Copyright (c) 2009, Andreas W. Liehr (li...@us...) +# Copyright (c) 2009-2010, Andreas W. Liehr (li...@us...) # All rights reserved. # # Redistribution and use in source and binary forms, with or without Modified: trunk/src/pyphant/pyphant/quantities/ParseQuantities.py =================================================================== --- trunk/src/pyphant/pyphant/quantities/ParseQuantities.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/pyphant/pyphant/quantities/ParseQuantities.py 2010-04-06 10:09:55 UTC (rev 674) @@ -40,8 +40,12 @@ import mx.DateTime.ISO from pyphant.quantities import Quantity +from pyphant.quantities.PhysicalQuantities import PhysicalQuantity -def str2unit(unit): +import logging +_logger = logging.getLogger("pyphant") + +def str2unit(unit,FMFversion='1.1'): """The function str2unit returns either a quantity or a float from a given string.""" # Prepare conversion to quantity if unit.startswith('.'): @@ -59,14 +63,22 @@ elif not (unit[0].isdigit() or unit[0]=='-'): unit = '1'+unit # Convert input to quantity or float - try: + if FMFversion not in ['1.0','1.1']: + raise ValueError, 'FMFversion %s not supported.' 
% FMFversion + else: unit = unit.replace('^', '**') - unit = Quantity(unit.encode('utf-8')) - except: - unit = float(unit) + try: + unit = unit.replace('^', '**') + if FMFversion=='1.1': + unit = Quantity(unit.encode('utf-8')) + elif FMFversion=='1.0': + unit1_0 = PhysicalQuantity(unit.encode('utf-8')) + unit = Quantity(str(unit1_0.inBaseUnits())) + except: + unit = float(unit) return unit -def parseQuantity(value): +def parseQuantity(value,FMFversion='1.1'): import re pm = re.compile(ur"(?:\\pm|\+-|\+/-)") try: @@ -76,10 +88,10 @@ if value.startswith('('): value = float(value[1:]) error, unit = [s.strip() for s in error.split(')')] - unit = str2unit(unit) + unit = str2unit(unit,FMFversion) value *= unit else: - value = str2unit(value) + value = str2unit(value,FMFversion) if error != None: if error.endswith('%'): error = value*float(error[:-1])/100.0 @@ -87,17 +99,17 @@ try: error = float(error)*unit except: - error = str2unit(error) + error = str2unit(error,FMFversion) return value, error -def parseVariable(oldVal): +def parseVariable(oldVal,FMFversion='1.1'): shortname, value = tuple([s.strip() for s in oldVal.split('=')]) - value, error = parseQuantity(value) + value, error = parseQuantity(value,FMFversion) return (shortname, value, error) -def parseDateTime(value): +def parseDateTime(value,FMFversion='1.1'): """ - >>>parseDateTime('2004-08-21 12:00:00+-12h') + >>>parseDateTime('2004-08-21 12:00:00+-12hr') (Quantity(731814.5,'d'), Quantity(0.5,'d')) >>>parseDateTime('2004-08-21 12:00:00') (Quantity(731814.5,'d'), None) @@ -105,7 +117,11 @@ datetimeWithError = value.split('+-') if len(datetimeWithError)==2: datetime = mx.DateTime.ISO.ParseAny(datetimeWithError[0]) - error = parseQuantity(datetimeWithError[1])[0].inUnitsOf('d') + uncertainty = parseQuantity(datetimeWithError[1],FMFversion)[0] + if uncertainty.isCompatible('h'): + _logger.warning("The uncertainty of timestamp %s has the unit 'h', which is deprecated. The correct abbreviation for hour is 'hr'." 
% value) + uncertainty = uncertainty*Quantity('1hr/h') + error = uncertainty.inUnitsOf('d') else: datetime = mx.DateTime.ISO.ParseAny(value) error = None Copied: trunk/src/pyphant/pyphant/quantities/PhysicalQuantities.py (from rev 673, trunk/src/pyphant/pyphant/quantities/__init__.py) =================================================================== --- trunk/src/pyphant/pyphant/quantities/PhysicalQuantities.py (rev 0) +++ trunk/src/pyphant/pyphant/quantities/PhysicalQuantities.py 2010-04-06 10:09:55 UTC (rev 674) @@ -0,0 +1,1004 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 1998-2007, Konrad Hinsen <hi...@cn...> +# Copyright (c) 2008-2009, Rectorate of the University of Freiburg +# Copyright (c) 2010, Andreas W. Liehr <li...@us...> +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the Freiburg Materials Research Center, +# University of Freiburg nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER +# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +""" +Quantities with units + +based on the module Scientific.Physics.PhysicalQuantities +written by Conrad Hinsen with contributions from Greg Ward. +A comprehensive documentation of these units is given in +Riede et al: On the Communication of Scientific Results: +The Full-Metadata Format (http://arxiv.org/abs/0904.1299), +which defines version 1.0 of the Full-Metadata Format. + +This module provides a data type that represents a physical +quantity together with its unit. It is possible to add and +subtract these quantities if the units are compatible, and +a quantity can be converted to another compatible unit. +Multiplication, subtraction, and raising to integer powers +is allowed without restriction, and the result will have +the correct unit. A quantity can be raised to a non-integer +power only if the result can be represented by integer powers +of the base units. + +The values of physical constants are taken from the 1986 +recommended values from CODATA. Other conversion factors +(e.g. for British units) come from various sources. I can't +guarantee for the correctness of all entries in the unit +table, so use this at your own risk. +""" + +rc = { 'fetchCurrencyRates' : False } + +class NumberDict(dict): + + """ + Dictionary storing numerical values + + Constructor: NumberDict() + + An instance of this class acts like an array of number with + generalized (non-integer) indices. A value of zero is assumed + for undefined entries. 
NumberDict instances support addition, + and subtraction with other NumberDict instances, and multiplication + and division by scalars. + """ + + def __getitem__(self, item): + try: + return dict.__getitem__(self, item) + except KeyError: + return 0 + + def __coerce__(self, other): + if type(other) == type({}): + other = NumberDict(other) + return self, other + + def __add__(self, other): + sum_dict = NumberDict() + for key in self.keys(): + sum_dict[key] = self[key] + for key in other.keys(): + sum_dict[key] = sum_dict[key] + other[key] + return sum_dict + + def __sub__(self, other): + sum_dict = NumberDict() + for key in self.keys(): + sum_dict[key] = self[key] + for key in other.keys(): + sum_dict[key] = sum_dict[key] - other[key] + return sum_dict + + def __mul__(self, other): + new = NumberDict() + for key in self.keys(): + new[key] = other*self[key] + return new + __rmul__ = __mul__ + + def __div__(self, other): + new = NumberDict() + for key in self.keys(): + new[key] = self[key]/other + return new + +import numpy.oldnumeric +def int_sum(a, axis=0): + return numpy.oldnumeric.add.reduce(a, axis) +def zeros_st(shape, other): + return numpy.oldnumeric.zeros(shape, dtype=other.dtype) +from numpy import ndarray as array_type + + +import re, string + + +# Class definitions + +class PhysicalQuantity: + + """ + Physical quantity with units + + PhysicalQuantity instances allow addition, subtraction, + multiplication, and division with each other as well as + multiplication, division, and exponentiation with numbers. + Addition and subtraction check that the units of the two operands + are compatible and return the result in the units of the first + operand. A limited set of mathematical functions (from module + Numeric) is applicable as well: + + - sqrt: equivalent to exponentiation with 0.5. + + - sin, cos, tan: applicable only to objects whose unit is + compatible with 'rad'. 
+ + See the documentation of the PhysicalQuantities module for a list + of the available units. + + Here is an example on usage: + + >>> from PhysicalQuantities import PhysicalQuantity as p # short hand + >>> distance1 = p('10 m') + >>> distance2 = p('10 km') + >>> total = distance1 + distance2 + >>> total + PhysicalQuantity(10010.0,'m') + >>> total.convertToUnit('km') + >>> total.getValue() + 10.01 + >>> total.getUnitName() + 'km' + >>> total = total.inBaseUnits() + >>> total + PhysicalQuantity(10010.0,'m') + >>> + >>> t = p(314159., 's') + >>> # convert to days, hours, minutes, and second: + >>> t2 = t.inUnitsOf('d','h','min','s') + >>> t2_print = ' '.join([str(i) for i in t2]) + >>> t2_print + '3.0 d 15.0 h 15.0 min 59.0 s' + >>> + >>> e = p('2.7 Hartree*Nav') + >>> e.convertToUnit('kcal/mol') + >>> e + PhysicalQuantity(1694.2757596034764,'kcal/mol') + >>> e = e.inBaseUnits() + >>> str(e) + '7088849.77818 kg*m**2/s**2/mol' + >>> + >>> freeze = p('0 degC') + >>> freeze = freeze.inUnitsOf ('degF') + >>> str(freeze) + '32.0 degF' + >>> + """ + + def __init__(self, *args): + """ + There are two constructor calling patterns: + + 1. PhysicalQuantity(value, unit), where value is any number + and unit is a string defining the unit + + 2. PhysicalQuantity(value_with_unit), where value_with_unit + is a string that contains both the value and the unit, + i.e. '1.5 m/s'. This form is provided for more convenient + interactive use. 
+ + @param args: either (value, unit) or (value_with_unit,) + @type args: (number, C{str}) or (C{str},) + """ + if len(args) == 2: + self.value = args[0] + self.unit = _findUnit(args[1]) + else: + s = string.strip(args[0]) + match = PhysicalQuantity._number.match(s) + if match is None: + raise TypeError('No number found') + self.value = string.atof(match.group(0)) + self.unit = _findUnit(s[len(match.group(0)):]) + + _number = re.compile('[+-]?[0-9]+(\\.[0-9]*)?([eE][+-]?[0-9]+)?') + + def __str__(self): + return str(self.value) + ' ' + self.unit.name() + + def __repr__(self): + return (self.__class__.__name__ + '(' + `self.value` + ',' + + `self.unit.name()` + ')') + + def _sum(self, other, sign1, sign2): + if not isPhysicalQuantity(other): + raise TypeError('Incompatible types') + new_value = sign1*self.value + \ + sign2*other.value*other.unit.conversionFactorTo(self.unit) + return self.__class__(new_value, self.unit) + + def __add__(self, other): + return self._sum(other, 1, 1) + + __radd__ = __add__ + + def __sub__(self, other): + return self._sum(other, 1, -1) + + def __rsub__(self, other): + return self._sum(other, -1, 1) + + def __cmp__(self, other): + diff = self._sum(other, 1, -1) + return cmp(diff.value, 0) + + def __mul__(self, other): + if not isPhysicalQuantity(other): + return self.__class__(self.value*other, self.unit) + value = self.value*other.value + unit = self.unit*other.unit + if unit.isDimensionless(): + return value*unit.factor + else: + return self.__class__(value, unit) + + __rmul__ = __mul__ + + def __div__(self, other): + if not isPhysicalQuantity(other): + return self.__class__(self.value/other, self.unit) + value = self.value/other.value + unit = self.unit/other.unit + if unit.isDimensionless(): + return value*unit.factor + else: + return self.__class__(value, unit) + + def __rdiv__(self, other): + if not isPhysicalQuantity(other): + return self.__class__(other/self.value, pow(self.unit, -1)) + value = other.value/self.value + unit = 
other.unit/self.unit + if unit.isDimensionless(): + return value*unit.factor + else: + return self.__class__(value, unit) + + def __pow__(self, other): + if isPhysicalQuantity(other): + raise TypeError('Exponents must be dimensionless') + return self.__class__(pow(self.value, other), pow(self.unit, other)) + + def __rpow__(self, other): + raise TypeError('Exponents must be dimensionless') + + def __abs__(self): + return self.__class__(abs(self.value), self.unit) + + def __pos__(self): + return self + + def __neg__(self): + return self.__class__(-self.value, self.unit) + + def __nonzero__(self): + return self.value != 0 + + def convertToUnit(self, unit): + """ + Change the unit and adjust the value such that + the combination is equivalent to the original one. The new unit + must be compatible with the previous unit of the object. + + @param unit: a unit + @type unit: C{str} + @raise TypeError: if the unit string is not a know unit or a + unit incompatible with the current one + """ + unit = _findUnit(unit) + self.value = _convertValue (self.value, self.unit, unit) + self.unit = unit + + def inUnitsOf(self, *units): + """ + Express the quantity in different units. If one unit is + specified, a new PhysicalQuantity object is returned that + expresses the quantity in that unit. If several units + are specified, the return value is a tuple of + PhysicalObject instances with with one element per unit such + that the sum of all quantities in the tuple equals the the + original quantity and all the values except for the last one + are integers. This is used to convert to irregular unit + systems like hour/minute/second. 
+ + @param units: one or several units + @type units: C{str} or sequence of C{str} + @returns: one or more physical quantities + @rtype: L{PhysicalQuantity} or C{tuple} of L{PhysicalQuantity} + @raises TypeError: if any of the specified units are not compatible + with the original unit + """ + units = map(_findUnit, units) + if len(units) == 1: + unit = units[0] + value = _convertValue (self.value, self.unit, unit) + return self.__class__(value, unit) + else: + units.sort() + result = [] + value = self.value + unit = self.unit + for i in range(len(units)-1,-1,-1): + value = value*unit.conversionFactorTo(units[i]) + if i == 0: + rounded = value + else: + rounded = _round(value) + result.append(self.__class__(rounded, units[i])) + value = value - rounded + unit = units[i] + return tuple(result) + + # Contributed by Berthold Hoellmann + def inBaseUnits(self): + """ + @returns: the same quantity converted to base units, + i.e. SI units in most cases + @rtype: L{PhysicalQuantity} + """ + new_value = self.value * self.unit.factor + num = '' + denom = '' + for i in xrange(9): + unit = _base_names[i] + power = self.unit.powers[i] + if power < 0: + denom = denom + '/' + unit + if power < -1: + denom = denom + '**' + str(-power) + elif power > 0: + num = num + '*' + unit + if power > 1: + num = num + '**' + str(power) + if len(num) == 0: + num = '1' + else: + num = num[1:] + return self.__class__(new_value, num + denom) + + def isCompatible (self, unit): + """ + @param unit: a unit + @type unit: C{str} + @returns: C{True} if the specified unit is compatible with the + one of the quantity + @rtype: C{bool} + """ + unit = _findUnit (unit) + return self.unit.isCompatible (unit) + + def getValue(self): + """Return value (float) of physical quantity (no unit).""" + return self.value + + def getUnitName(self): + """Return unit (string) of physical quantity.""" + return self.unit.name() + + def sqrt(self): + return pow(self, 0.5) + + def sin(self): + if self.unit.isAngle(): + 
return numpy.oldnumeric.sin(self.value * \ + self.unit.conversionFactorTo(_unit_table['rad'])) + else: + raise TypeError('Argument of sin must be an angle') + + def cos(self): + if self.unit.isAngle(): + return numpy.oldnumeric.cos(self.value * \ + self.unit.conversionFactorTo(_unit_table['rad'])) + else: + raise TypeError('Argument of cos must be an angle') + + def tan(self): + if self.unit.isAngle(): + return numpy.oldnumeric.tan(self.value * \ + self.unit.conversionFactorTo(_unit_table['rad'])) + else: + raise TypeError('Argument of tan must be an angle') + + +class PhysicalUnit: + + """ + Physical unit + + A physical unit is defined by a name (possibly composite), a scaling + factor, and the exponentials of each of the SI base units that enter into + it. Units can be multiplied, divided, and raised to integer powers. + """ + + def __init__(self, names, factor, powers, offset=0): + """ + @param names: a dictionary mapping each name component to its + associated integer power (e.g. C{{'m': 1, 's': -1}}) + for M{m/s}). As a shorthand, a string may be passed + which is assigned an implicit power 1. 
+ @type names: C{dict} or C{str} + @param factor: a scaling factor + @type factor: C{float} + @param powers: the integer powers for each of the nine base units + @type powers: C{list} of C{int} + @param offset: an additive offset to the base unit (used only for + temperatures) + @type offset: C{float} + """ + if type(names) == type(''): + self.names = NumberDict() + self.names[names] = 1 + else: + self.names = names + self.factor = factor + self.offset = offset + self.powers = powers + + def __repr__(self): + return '<PhysicalUnit ' + self.name() + '>' + + __str__ = __repr__ + + def __cmp__(self, other): + if self.powers != other.powers: + raise TypeError('Incompatible units') + return cmp(self.factor, other.factor) + + def __mul__(self, other): + if self.offset != 0 or (isPhysicalUnit (other) and other.offset != 0): + raise TypeError("cannot multiply units with non-zero offset") + if isPhysicalUnit(other): + return PhysicalUnit(self.names+other.names, + self.factor*other.factor, + map(lambda a,b: a+b, + self.powers, other.powers)) + else: + return PhysicalUnit(self.names+{str(other): 1}, + self.factor*other, + self.powers, + self.offset * other) + + __rmul__ = __mul__ + + def __div__(self, other): + if self.offset != 0 or (isPhysicalUnit (other) and other.offset != 0): + raise TypeError("cannot divide units with non-zero offset") + if isPhysicalUnit(other): + return PhysicalUnit(self.names-other.names, + self.factor/other.factor, + map(lambda a,b: a-b, + self.powers, other.powers)) + else: + return PhysicalUnit(self.names+{str(other): -1}, + self.factor/other, self.powers) + + def __rdiv__(self, other): + if self.offset != 0 or (isPhysicalUnit (other) and other.offset != 0): + raise TypeError("cannot divide units with non-zero offset") + if isPhysicalUnit(other): + return PhysicalUnit(other.names-self.names, + other.factor/self.factor, + map(lambda a,b: a-b, + other.powers, self.powers)) + else: + return PhysicalUnit({str(other): 1}-self.names, + 
other/self.factor, + map(lambda x: -x, self.powers)) + + def __pow__(self, other): + if self.offset != 0: + raise TypeError("cannot exponentiate units with non-zero offset") + if isinstance(other, int): + return PhysicalUnit(other*self.names, pow(self.factor, other), + map(lambda x,p=other: x*p, self.powers)) + if isinstance(other, float): + inv_exp = 1./other + rounded = int(numpy.oldnumeric.floor(inv_exp+0.5)) + if abs(inv_exp-rounded) < 1.e-10: + if reduce(lambda a, b: a and b, + map(lambda x, e=rounded: x%e == 0, self.powers)): + f = pow(self.factor, other) + p = map(lambda x,p=rounded: x/p, self.powers) + if reduce(lambda a, b: a and b, + map(lambda x, e=rounded: x%e == 0, + self.names.values())): + names = self.names/rounded + else: + names = NumberDict() + if f != 1.: + names[str(f)] = 1 + for i in range(len(p)): + names[_base_names[i]] = p[i] + return PhysicalUnit(names, f, p) + else: + raise TypeError('Illegal exponent') + raise TypeError('Only integer and inverse integer exponents allowed') + + def conversionFactorTo(self, other): + """ + @param other: another unit + @type other: L{PhysicalUnit} + @returns: the conversion factor from this unit to another unit + @rtype: C{float} + @raises TypeError: if the units are not compatible + """ + if self.powers != other.powers: + raise TypeError('Incompatible units') + if self.offset != other.offset and self.factor != other.factor: + raise TypeError(('Unit conversion (%s to %s) cannot be expressed ' + + 'as a simple multiplicative factor') % \ + (self.name(), other.name())) + return self.factor/other.factor + + def conversionTupleTo(self, other): # added 1998/09/29 GPW + """ + @param other: another unit + @type other: L{PhysicalUnit} + @returns: the conversion factor and offset from this unit to + another unit + @rtype: (C{float}, C{float}) + @raises TypeError: if the units are not compatible + """ + if self.powers != other.powers: + raise TypeError('Incompatible units') + + # let (s1,d1) be the conversion tuple 
from 'self' to base units + # (ie. (x+d1)*s1 converts a value x from 'self' to base units, + # and (x/s1)-d1 converts x from base to 'self' units) + # and (s2,d2) be the conversion tuple from 'other' to base units + # then we want to compute the conversion tuple (S,D) from + # 'self' to 'other' such that (x+D)*S converts x from 'self' + # units to 'other' units + # the formula to convert x from 'self' to 'other' units via the + # base units is (by definition of the conversion tuples): + # ( ((x+d1)*s1) / s2 ) - d2 + # = ( (x+d1) * s1/s2) - d2 + # = ( (x+d1) * s1/s2 ) - (d2*s2/s1) * s1/s2 + # = ( (x+d1) - (d1*s2/s1) ) * s1/s2 + # = (x + d1 - d2*s2/s1) * s1/s2 + # thus, D = d1 - d2*s2/s1 and S = s1/s2 + factor = self.factor / other.factor + offset = self.offset - (other.offset * other.factor / self.factor) + return (factor, offset) + + def isCompatible (self, other): # added 1998/10/01 GPW + """ + @param other: another unit + @type other: L{PhysicalUnit} + @returns: C{True} if the units are compatible, i.e. 
if the powers of + the base units are the same + @rtype: C{bool} + """ + return self.powers == other.powers + + def isDimensionless(self): + return not reduce(lambda a,b: a or b, self.powers) + + def isAngle(self): + return self.powers[7] == 1 and \ + reduce(lambda a,b: a + b, self.powers) == 1 + + def setName(self, name): + self.names = NumberDict() + self.names[name] = 1 + + def name(self): + num = '' + denom = '' + for unit in self.names.keys(): + power = self.names[unit] + if power < 0: + denom = denom + '/' + unit + if power < -1: + denom = denom + '**' + str(-power) + elif power > 0: + num = num + '*' + unit + if power > 1: + num = num + '**' + str(power) + if len(num) == 0: + num = '1' + else: + num = num[1:] + return num + denom + + +# Type checks + +def isPhysicalUnit(x): + """ + @param x: an object + @type x: any + @returns: C{True} if x is a L{PhysicalUnit} + @rtype: C{bool} + """ + return hasattr(x, 'factor') and hasattr(x, 'powers') + +def isPhysicalQuantity(x): + """ + @param x: an object + @type x: any + @returns: C{True} if x is a L{PhysicalQuantity} + @rtype: C{bool} + """ + return hasattr(x, 'value') and hasattr(x, 'unit') + + +# Helper functions + +def _findUnit(unit): + if type(unit) == type(''): + name = string.strip(unit) + unit = eval(name, _unit_table) + for cruft in ['__builtins__', '__args__']: + try: del _unit_table[cruft] + except: pass + + if not isPhysicalUnit(unit): + raise TypeError(str(unit) + ' is not a unit') + return unit + +def _round(x): + if numpy.oldnumeric.greater(x, 0.): + return numpy.oldnumeric.floor(x) + else: + return numpy.oldnumeric.ceil(x) + + +def _convertValue (value, src_unit, target_unit): + (factor, offset) = src_unit.conversionTupleTo(target_unit) + return (value + offset) * factor + + +# SI unit definitions + +_base_names = ['m', 'kg', 's', 'A', 'K', 'mol', 'cd', 'rad', 'sr','EUR'] + +_base_units = [('m', PhysicalUnit('m', 1., [1,0,0,0,0,0,0,0,0,0])), + ('g', PhysicalUnit('g', 0.001, [0,1,0,0,0,0,0,0,0,0])), + 
('s', PhysicalUnit('s', 1., [0,0,1,0,0,0,0,0,0,0])), + ('A', PhysicalUnit('A', 1., [0,0,0,1,0,0,0,0,0,0])), + ('K', PhysicalUnit('K', 1., [0,0,0,0,1,0,0,0,0,0])), + ('mol', PhysicalUnit('mol', 1., [0,0,0,0,0,1,0,0,0,0])), + ('cd', PhysicalUnit('cd', 1., [0,0,0,0,0,0,1,0,0,0])), + ('rad', PhysicalUnit('rad', 1., [0,0,0,0,0,0,0,1,0,0])), + ('sr', PhysicalUnit('sr', 1., [0,0,0,0,0,0,0,0,1,0])), + ('EUR', PhysicalUnit('EUR', 1., [0,0,0,0,0,0,0,0,0,1])), + ] + +_prefixes = [('Y', 1.e24), + ('Z', 1.e21), + ('E', 1.e18), + ('P', 1.e15), + ('T', 1.e12), + ('G', 1.e9), + ('M', 1.e6), + ('k', 1.e3), + ('h', 1.e2), + ('da', 1.e1), + ('d', 1.e-1), + ('c', 1.e-2), + ('m', 1.e-3), + ('mu', 1.e-6), + ('n', 1.e-9), + ('p', 1.e-12), + ('f', 1.e-15), + ('a', 1.e-18), + ('z', 1.e-21), + ('y', 1.e-24), + ] + +_unit_table = {} + +for unit in _base_units: + _unit_table[unit[0]] = unit[1] + +_help = [] + +def _addUnit(name, unit, comment=''): + if _unit_table.has_key(name): + raise KeyError, 'Unit ' + name + ' already defined' + if comment: + _help.append((name, comment, unit)) + if type(unit) == type(''): + unit = eval(unit, _unit_table) + for cruft in ['__builtins__', '__args__']: + try: del _unit_table[cruft] + except: pass + unit.setName(name) + _unit_table[name] = unit + +def _addPrefixed(unit): + _help.append('Prefixed units for %s:' % unit) + _prefixed_names = [] + if unit in ['EUR']: + validPrefixes = filter(lambda prefix: prefix[1]>=1000,_prefixes) + else: + validPrefixes = _prefixes + for prefix in validPrefixes: + name = prefix[0] + unit + _addUnit(name, prefix[1]*_unit_table[unit]) + _prefixed_names.append(name) + _help.append(', '.join(_prefixed_names)) + +# SI derived units; these automatically get prefixes +_help.append('SI derived units; these automatically get prefixes:\n' + \ + ', '.join([prefix + ' (%.0E)' % value for prefix, value in _prefixes]) + \ + '\n') + + +_unit_table['kg'] = PhysicalUnit('kg', 1., [0,1,0,0,0,0,0,0,0,0]) + +_addUnit('Hz', '1/s', 'Hertz') 
+_addUnit('N', 'm*kg/s**2', 'Newton') +_addUnit('Pa', 'N/m**2', 'Pascal') +_addUnit('J', 'N*m', 'Joule') +_addUnit('W', 'J/s', 'Watt') +_addUnit('C', 's*A', 'Coulomb') +_addUnit('V', 'W/A', 'Volt') +_addUnit('F', 'C/V', 'Farad') +_addUnit('ohm', 'V/A', 'Ohm') +_addUnit('S', 'A/V', 'Siemens') +_addUnit('Wb', 'V*s', 'Weber') +_addUnit('T', 'Wb/m**2', 'Tesla') +_addUnit('H', 'Wb/A', 'Henry') +_addUnit('lm', 'cd*sr', 'Lumen') +_addUnit('lx', 'lm/m**2', 'Lux') +_addUnit('Bq', '1/s', 'Becquerel') +_addUnit('Gy', 'J/kg', 'Gray') +_addUnit('Sv', 'J/kg', 'Sievert') + +del _unit_table['kg'] + +for unit in _unit_table.keys(): + _addPrefixed(unit) + +# Fundamental constants +_help.append('Fundamental constants:') + +_unit_table['pi'] = numpy.oldnumeric.pi +_addUnit('c', '299792458.*m/s', 'speed of light') +_addUnit('mu0', '4.e-7*pi*N/A**2', 'permeability of vacuum') +_addUnit('eps0', '1/mu0/c**2', 'permittivity of vacuum') +_addUnit('Grav', '6.67259e-11*m**3/kg/s**2', 'gravitational constant') +_addUnit('hplanck', '6.6260755e-34*J*s', 'Planck constant') +_addUnit('hbar', 'hplanck/(2*pi)', 'Planck constant / 2pi') +_addUnit('e', '1.60217733e-19*C', 'elementary charge') +_addUnit('me', '9.1093897e-31*kg', 'electron mass') +_addUnit('mp', '1.6726231e-27*kg', 'proton mass') +_addUnit('Nav', '6.0221367e23/mol', 'Avogadro number') +_addUnit('k', '1.380658e-23*J/K', 'Boltzmann constant') + +# Time units +_help.append('Time units:') + +_addUnit('min', '60*s', 'minute') +_addUnit('h', '60*min', 'hour') +_addUnit('d', '24*h', 'day') +_addUnit('wk', '7*d', 'week') +_addUnit('yr', '365.25*d', 'year') + +# Length units +_help.append('Length units:') + +_addUnit('inch', '2.54*cm', 'inch') +_addUnit('ft', '12*inch', 'foot') +_addUnit('yd', '3*ft', 'yard') +_addUnit('mi', '5280.*ft', '(British) mile') +_addUnit('nmi', '1852.*m', 'Nautical mile') +_addUnit('Ang', '1.e-10*m', 'Angstrom') +_addUnit('lyr', 'c*yr', 'light year') +_addUnit('AU', '149597870691*m', 'astronomical unit') 
+_addUnit('pc', '3.08567758128E16*m','parsec') +_addUnit('Bohr', '4*pi*eps0*hbar**2/me/e**2', 'Bohr radius') + +# Area units +_help.append('Area units:') + +_addUnit('ha', '10000*m**2', 'hectare') +_addUnit('acres', 'mi**2/640', 'acre') +_addUnit('b', '1.e-28*m', 'barn') + +# Volume units +_help.append('Volume units:') + +_addUnit('l', 'dm**3', 'liter') +_addUnit('dl', '0.1*l', 'deci liter') +_addUnit('cl', '0.01*l', 'centi liter') +_addUnit('ml', '0.001*l', 'milli liter') +_addUnit('tsp', '4.92892159375*ml', 'teaspoon') +_addUnit('tbsp', '3*tsp', 'tablespoon') +_addUnit('floz', '2*tbsp', 'fluid ounce') +_addUnit('cup', '8*floz', 'cup') +_addUnit('pt', '16*floz', 'pint') +_addUnit('qt', '2*pt', 'quart') +_addUnit('galUS', '4*qt', 'US gallon') +_addUnit('galUK', '4.54609*l', 'British gallon') + +# Mass units +_help.append('Mass units:') + +_addUnit('amu', '1.6605402e-27*kg', 'atomic mass units') +_addUnit('oz', '28.349523125*g', 'ounce') +_addUnit('lb', '16*oz', 'pound') +_addUnit('ton', '2000*lb', 'ton') + +# Concentration units +_help.append('Concentration units:') +_addUnit('M','mol/m**3','molar concentration') +_addUnit('mM','0.001*mol/m**3','millimolar') +_addUnit('muM','10**-6*mol/m**3','micromolar') + +# Force units +_help.append('Force units:') + +_addUnit('dyn', '1.e-5*N', 'dyne (cgs unit)') + +# Energy units +_help.append('Energy units:') + +_addUnit('erg', '1.e-7*J', 'erg (cgs unit)') +_addUnit('eV', 'e*V', 'electron volt') +_addUnit('Hartree', 'me*e**4/16/pi**2/eps0**2/hbar**2', 'Wavenumbers/inverse cm') +_addUnit('Ken', 'k*K', 'Kelvin as energy unit') +_addUnit('cal', '4.184*J', 'thermochemical calorie') +_addUnit('kcal', '1000*cal', 'thermochemical kilocalorie') +_addUnit('cali', '4.1868*J', 'international calorie') +_addUnit('kcali', '1000*cali', 'international kilocalorie') +_addUnit('Btu', '1055.05585262*J', 'British thermal unit') + +_addPrefixed('eV') + +# Power units +_help.append('Power units:') + +_addUnit('hp', '745.7*W', 'horsepower') + +# 
Pressure units +_help.append('Pressure units:') + +_addUnit('bar', '1.e5*Pa', 'bar (cgs unit)') +_addUnit('dbar', '1.e4*Pa', 'dbar (cgs unit)') +_addUnit('mbar', '1.e2*Pa', 'mbar (cgs unit)') +_addUnit('atm', '101325.*Pa', 'standard atmosphere') +_addUnit('torr', 'atm/760', 'torr = mm of mercury') +_addUnit('psi', '6894.75729317*Pa', 'pounds per square inch') + +# Angle units +_help.append('Angle units:') + +_addUnit('deg', 'pi*rad/180', 'degrees') + +_help.append('Temperature units:') +# Temperature units -- can't use the 'eval' trick that _addUnit provides +# for degC and degF because you can't add units +kelvin = _findUnit ('K') +_addUnit ('degR', '(5./9.)*K', 'degrees Rankine') +_addUnit ('degC', PhysicalUnit (None, 1.0, kelvin.powers, 273.15), + 'degrees Celcius') +_addUnit ('degF', PhysicalUnit (None, 5./9., kelvin.powers, 459.67), + 'degree Fahrenheit') +del kelvin + +_help.append('Old European currencies:') +#Taken from http://www.xe.com/euro.php on 2008-11-05 +_addUnit('ATS', 'EUR/13.7603' ,'Austria, Schilling') +_addUnit('BEF', 'EUR/40.3399' ,'Belgium, Franc') +_addUnit('CYP', 'EUR/0.585274','Cyprus, Pound') +_addUnit('DEM', 'EUR/1.95583' ,'Germany, Deutsche Mark') +_addUnit('ESP', 'EUR/166.386' ,'Spain, Peseta') +_addUnit('FIM', 'EUR/5.94573' ,'Finland, Markka') +_addUnit('FRF', 'EUR/5.94573' ,'France, Franc') +_addUnit('GRD', 'EUR/340.750' ,'Greece, Drachma') +_addUnit('IEP', 'EUR/0.787564','Ireland, Pound') +_addUnit('ITL', 'EUR/1936.27' ,'Italy, Lira') +_addUnit('LUF', 'EUR/40.3399' ,'Luxembourg, Franc') +_addUnit('MTL', 'EUR/0.429300','Malta, Lira') +_addUnit('NLG', 'EUR/2.20371' ,'The Netherlands, Guilder (also called Florin)') +_addUnit('PTE', 'EUR/200.482' ,'Portugal, Escudo') +_addUnit('SIT', 'EUR/239.640' ,'Slovenia, Tolar') +_addUnit('VAL', 'EUR/1936.27' ,'Vatican City, Lira') + +#Get daily updated exchange rates +if rc['fetchCurrencyRates']: + import urllib + from xml.dom import minidom + url = 
"http://www.ecb.europa.eu/stats/eurofxref/eurofxref-daily.xml" + currencyNames={'USD':'US dollar' , 'JPY':'Japanese yen', + 'BGN':'Bulgarian lev' , 'CZK':'Czech koruna', + 'DKK':'Danish krone' , 'EEK':'Estonian kroon', + 'GBP':'Pound sterling' ,'HUF':'Hungarian forint', + 'LTL':'Lithuanian litas','LVL':'Latvian lats', + 'PLN':'Polish zloty', 'RON':'New Romanian leu', + 'SEK':'Swedish krona', 'SKK':'Slovak koruna', + 'CHF':'Swiss franc', 'ISK':'Icelandic krona', + 'NOK':'Norwegian krone', 'HRK':'Croatian kuna', + 'RUB':'Russian rouble', 'TRY':'New Turkish lira', + 'AUD':'Australian dollar','BRL':'Brasilian real', + 'CAD':'Canadian dollar', 'CNY':'Chinese yuan renminbi', + 'HKD':'Hong Kong dollar','IDR':'Indonesian rupiah', + 'KRW':'South Korean won','MXN':'Mexican peso', + 'MYR':'Malaysian ringgit','NZD':'New Zealand dollar', + 'PHP':'Philippine peso', 'SGD':'Singapore dollar', + 'THB':'Thai baht', 'ZAR':'South African rand'} + try: + doc = minidom.parseString(urllib.urlopen(url).read()) + elements = doc.documentElement.getElementsByTagName('Cube') + for element in elements[2:]: + currency = element.getAttribute('currency').encode('utf8') + _addUnit(currency, + 'EUR/%s' % element.getAttribute('rate').encode('utf8'), + currencyNames[currency]) + print "Added exchange rate of %s for %s." % (elements[1].getAttribute('time'), + [ i.getAttribute('currency').encode('utf8') + for i in elements[2:] ]) + except: + print "WARNING: No daily exchange rates available." 
+ +def description(): + """Return a string describing all available units.""" + s = '' # collector for description text + for entry in _help: + if isinstance(entry, basestring): + # headline for new section + s += '\n' + entry + '\n' + elif isinstance(entry, tuple): + name, comment, unit = entry + s += '%-8s %-26s %s\n' % (name, comment, unit) + else: + # impossible + raise TypeError, 'wrong construction of _help list' + return s + +# add the description of the units to the module's doc string: +__doc__ += '\n' + description() + +# Some demonstration code. Run with "python -i PhysicalQuantities.py" +# to have this available. + +if __name__ == '__main__': + +# from Scientific.N import * + l = PhysicalQuantity(10., 'm') + big_l = PhysicalQuantity(10., 'km') + print big_l + l + t = PhysicalQuantity(314159., 's') + print t.inUnitsOf('d','h','min','s') + + p = PhysicalQuantity # just a shorthand... + + e = p('2.7 Hartree*Nav') + e.convertToUnit('kcal/mol') + print e + print e.inBaseUnits() + + freeze = p('0 degC') + print freeze.inUnitsOf ('degF') + + euro = PhysicalQuantity('1 EUR') + print euro.inUnitsOf('DEM') + print euro.inUnitsOf('USD') + + euroSQM = PhysicalQuantity('19.99 EUR/m**2') + print "%s=%s" % (euroSQM,euroSQM.inUnitsOf('EUR/cm**2')) Modified: trunk/src/pyphant/pyphant/quantities/__init__.py =================================================================== --- trunk/src/pyphant/pyphant/quantities/__init__.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/pyphant/pyphant/quantities/__init__.py 2010-04-06 10:09:55 UTC (rev 674) @@ -2,7 +2,7 @@ # Copyright (c) 1998-2007, Konrad Hinsen <hi...@cn...> # Copyright (c) 2008-2010, Rectorate of the University of Freiburg -# Copyright (c) 2009-2010, Andreas W. Liehr +# Copyright (c) 2009-2010, Andreas W. Liehr <li...@us...> # All rights reserved. 
# # Redistribution and use in source and binary forms, with or without @@ -36,6 +36,11 @@ based on the module Scientific.Physics.PhysicalQuantities written by Conrad Hinsen with contributions from Greg Ward. +A comprehensive documentation of these units is given in +Riede et al: On the Communication of Scientific Data: +The Full-Metadata Format, Computer Physics and Communication 181(3), +2010, pp 651--662 (doi:10.1016/j.cpc.2009.11.014), +which defines version 1.1 of the Full-Metadata Format. This module provides a data type that represents a physical quantity together with its unit. It is possible to add and @@ -126,7 +131,7 @@ class Quantity: """ - Physical quantity with units + Quantity with units Quantity instances allow addition, subtraction, multiplication, and division with each other as well as @@ -141,12 +146,12 @@ - sin, cos, tan: applicable only to objects whose unit is compatible with 'rad'. - See the documentation of the quantities.module for a list + See the documentation of the quantities module for a list of the available units. 
Here is an example on usage: - >>> from quantities.import Quantity as p # short hand + >>> from quantities import Quantity as p # short hand >>> distance1 = p('10 m') >>> distance2 = p('10 km') >>> total = distance1 + distance2 @@ -163,12 +168,12 @@ >>> >>> t = p(314159., 's') >>> # convert to days, hours, minutes, and second: - >>> t2 = t.inUnitsOf('d','h','min','s') + >>> t2 = t.inUnitsOf('d','hr','min','s') >>> t2_print = ' '.join([str(i) for i in t2]) >>> t2_print '3.0 d 15.0 h 15.0 min 59.0 s' >>> - >>> e = p('2.7 Hartree*Nav') + >>> e = p('2.7 Hartree*NA') >>> e.convertToUnit('kcal/mol') >>> e Quantity(1694.2757596034764,'kcal/mol') @@ -237,7 +242,8 @@ return self._sum(other, -1, 1) def __cmp__(self, other): - diff = self._sum(other, 1, -1) + normed = self.inBaseUnits() # Minimizing numerical errors + diff = normed._sum(other, 1, -1) return cmp(diff.value, 0) def __mul__(self, other): @@ -791,21 +797,23 @@ _addUnit('c', '299792458.*m/s', 'speed of light') _addUnit('mu0', '4.e-7*pi*N/A**2', 'permeability of vacuum') _addUnit('eps0', '1/mu0/c**2', 'permittivity of vacuum') -_addUnit('Grav', '6.67259e-11*m**3/kg/s**2', 'gravitational constant') -_addUnit('hplanck', '6.6260755e-34*J*s', 'Planck constant') -_addUnit('hbar', 'hplanck/(2*pi)', 'Planck constant / 2pi') -_addUnit('e', '1.60217733e-19*C', 'elementary charge') -_addUnit('me', '9.1093897e-31*kg', 'electron mass') -_addUnit('mp', '1.6726231e-27*kg', 'proton mass') -_addUnit('Nav', '6.0221367e23/mol', 'Avogadro number') -_addUnit('k', '1.380658e-23*J/K', 'Boltzmann constant') +_addUnit('Fa','96485.3399*C/mol', 'Faraday constant') +_addUnit('G', '6.67428e-11*m**3/kg/s**2', 'gravitational constant') +_addUnit('h', '6.62606896e-34*J*s', 'Planck constant') +_addUnit('hbar', 'h/(2*pi)', 'Planck constant / 2pi') +_addUnit('e', '1.602176487e-19*C', 'elementary charge') +_addUnit('me', '9.10938215e-31*kg', 'electron mass') +_addUnit('mp', '1.672621637e-27*kg', 'proton mass') +_addUnit('NA', '6.02214179e23/mol', 
'Avogadro number') +_addUnit('k', '1.3806504e-23*J/K', 'Boltzmann constant') +_addUnit('Ryd','10973731.568527/m','Rydberg constant') # Time units _help.append('Time units:') _addUnit('min', '60*s', 'minute') -_addUnit('h', '60*min', 'hour') -_addUnit('d', '24*h', 'day') +_addUnit('hr', '60*min', 'hour') +_addUnit('d', '24*hr', 'day') _addUnit('wk', '7*d', 'week') _addUnit('yr', '365.25*d', 'year') @@ -820,7 +828,7 @@ _addUnit('Ang', '1.e-10*m', 'Angstrom') _addUnit('lyr', 'c*yr', 'light year') _addUnit('AU', '149597870691*m', 'astronomical unit') -_addUnit('pc', '3.08567758128E16*m','parsec') +_addUnit('pc', '3.0856776E16*m','parsec') _addUnit('Bohr', '4*pi*eps0*hbar**2/me/e**2', 'Bohr radius') # Area units @@ -843,13 +851,13 @@ _addUnit('cup', '8*floz', 'cup') _addUnit('pt', '16*floz', 'pint') _addUnit('qt', '2*pt', 'quart') -_addUnit('galUS', '4*qt', 'US gallon') +_addUnit('galUS', '231*inch**3', 'US gallon') _addUnit('galUK', '4.54609*l', 'British gallon') # Mass units _help.append('Mass units:') -_addUnit('amu', '1.6605402e-27*kg', 'atomic mass units') +_addUnit('u', '1.660538782e-27*kg', 'atomic mass units') _addUnit('oz', '28.349523125*g', 'ounce') _addUnit('lb', '16*oz', 'pound') _addUnit('ton', '2000*lb', 'ton') @@ -870,7 +878,8 @@ _addUnit('erg', '1.e-7*J', 'erg (cgs unit)') _addUnit('eV', 'e*V', 'electron volt') -_addUnit('Hartree', 'me*e**4/16/pi**2/eps0**2/hbar**2', 'Wavenumbers/inverse cm') +_addUnit('Hartree', 'me*e**4/eps0**2/h**2/4', 'Hartree') +_addUnit('invcm', 'h*c/cm', 'Wave-numbers/inverse cm') _addUnit('Ken', 'k*K', 'Kelvin as energy unit') _addUnit('cal', '4.184*J', 'thermochemical calorie') _addUnit('kcal', '1000*cal', 'thermochemical kilocalorie') @@ -984,21 +993,18 @@ # add the description of the units to the module's doc string: __doc__ += '\n' + description() -# Some demonstration code. Run with "python -i quantities.py" -# to have this available. +# Some demonstration code. 
if __name__ == '__main__': - -# from Scientific.N import * l = Quantity(10., 'm') big_l = Quantity(10., 'km') print big_l + l t = Quantity(314159., 's') - print t.inUnitsOf('d','h','min','s') + print t.inUnitsOf('d','hr','min','s') p = Quantity # just a shorthand... - e = p('2.7 Hartree*Nav') + e = p('2.7 Hartree*NA') e.convertToUnit('kcal/mol') print e print e.inBaseUnits() Modified: trunk/src/pyphant/pyphant/tests/TestParseQuantities.py =================================================================== --- trunk/src/pyphant/pyphant/tests/TestParseQuantities.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/pyphant/pyphant/tests/TestParseQuantities.py 2010-04-06 10:09:55 UTC (rev 674) @@ -40,13 +40,18 @@ from pyphant.quantities.ParseQuantities import parseDateTime,str2unit from pyphant.quantities import Quantity """ - >>>parseDateTime('2004-08-21 12:00:00+-12h') + >>>parseDateTime('2004-08-21 12:00:00+-12hr') (Quantity(731814.5,'d'), Quantity(0.5,'d')) >>>parseDateTime('2004-08-21 12:00:00') (Quantity(731814.5,'d'), None) """ class TestParseDateTime(unittest.TestCase): - def testWithUncertainty(self): + def testWithError(self): + self.assertEqual(parseDateTime('2004-08-21 12:00:00+-12hr'), + (Quantity(731814.5,'d'), Quantity(0.5,'d')) + ) + + def testWithErrorOldDeprecatedAbbreviation(self): self.assertEqual(parseDateTime('2004-08-21 12:00:00+-12h'), (Quantity(731814.5,'d'), Quantity(0.5,'d')) ) @@ -59,6 +64,21 @@ result = str2unit('1V') self.assertEqual(expected,result) + def setUp(self): + self.inputDict = {'complexJ':'1.0j','Joule':'1.0J'} + + def testJouleValue(self): + """Physical quantities with unit Joule are indicated by 'J'.""" + result = str2unit(self.inputDict['Joule']) + self.assertEqual(result,Quantity(self.inputDict['Joule'])) + + def testHourPlanck(self): + """In FMF 1.0 unit 'h' denotes hours, while in FMF 1.1 'h' denotes the Planck constant.""" + result = str2unit('1h') + self.assertEqual(result,Quantity('6.62606896e-34 J*s')) + result = 
str2unit('1h',FMFversion='1.0') + self.assertEqual(result,Quantity('3600s')) + if __name__ == "__main__": import sys if len(sys.argv) == 1: Modified: trunk/src/workers/ImageProcessing/ImageProcessing/AutoFocus.py =================================================================== --- trunk/src/workers/ImageProcessing/ImageProcessing/AutoFocus.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/workers/ImageProcessing/ImageProcessing/AutoFocus.py 2010-04-06 10:09:55 UTC (rev 674) @@ -152,10 +152,10 @@ vol = (self.yxCube & fslice).getVolume() if not isQuantity(vol): vol = float(vol) - yxratio = 2.0 * vol / (fslice.getVolume() + self.yxCube.getVolume()) + yxratio = vol / fslice.getVolume() fszCube = Cube([slice(zvalue - self.ztol, zvalue + self.ztol)]) zvol = (self.zCube & fszCube).getVolume() - # weird notation necessary for PhysicalQuantities + # weird notation necessary for Quantities if yxratio >= self.boundRatio and zvol != 0 * zvol: orCube = self.yxCube | fslice self.yxCube = orCube Modified: trunk/src/workers/OSC/OSC/CompareFields.py =================================================================== --- trunk/src/workers/OSC/OSC/CompareFields.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/workers/OSC/OSC/CompareFields.py 2010-04-06 10:09:55 UTC (rev 674) @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2008, Rectorate of the University of Freiburg +# Copyright (c) 2008-2009, Rectorate of the University of Freiburg +# Copyright (c) 2009, Andreas W. Liehr (li...@us...) # All rights reserved. 
# # Redistribution and use in source and binary forms, with or without Modified: trunk/src/workers/OSC/OSC/ComputeFunctional.py =================================================================== --- trunk/src/workers/OSC/OSC/ComputeFunctional.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/workers/OSC/OSC/ComputeFunctional.py 2010-04-06 10:09:55 UTC (rev 674) @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2008, Rectorate of the University of Freiburg +# Copyright (c) 2008-2009, Rectorate of the University of Freiburg +# Copyright (c) 2009, Andreas W. Liehr (li...@us...) # All rights reserved. # # Redistribution and use in source and binary forms, with or without @@ -46,7 +47,6 @@ from pyphant import quantities import copy - class ComputeFunctional(Worker.Worker): API = 2 VERSION = 1 Modified: trunk/src/workers/OSC/OSC/ErrorEstimator.py =================================================================== --- trunk/src/workers/OSC/OSC/ErrorEstimator.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/workers/OSC/OSC/ErrorEstimator.py 2010-04-06 10:09:55 UTC (rev 674) @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright (c) 2007-2008, Rectorate of the University of Freiburg +# Copyright (c) 2009, Andreas W. Liehr (li...@us...) # All rights reserved. # # Redistribution and use in source and binary forms, with or without Modified: trunk/src/workers/OSC/OSC/EstimateParameter.py =================================================================== --- trunk/src/workers/OSC/OSC/EstimateParameter.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/workers/OSC/OSC/EstimateParameter.py 2010-04-06 10:09:55 UTC (rev 674) @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2008, Rectorate of the University of Freiburg +# Copyright (c) 2008-2009, Rectorate of the University of Freiburg +# Copyright (c) 2009, Andreas W. Liehr (li...@us...) # All rights reserved. 
# # Redistribution and use in source and binary forms, with or without @@ -44,7 +45,6 @@ from pyphant import quantities import copy - class EstimateParameter(Worker.Worker): API = 2 VERSION = 1 Modified: trunk/src/workers/OSC/OSC/ExtremumFinder.py =================================================================== --- trunk/src/workers/OSC/OSC/ExtremumFinder.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/workers/OSC/OSC/ExtremumFinder.py 2010-04-06 10:09:55 UTC (rev 674) @@ -2,6 +2,7 @@ #!Pyphant's ExtremumFinder worker #!------------------------------- # Copyright (c) 2007-2008, Rectorate of the University of Freiburg +# Copyright (c) 2009, Andreas W. Liehr (li...@us...) # All rights reserved. # # Redistribution and use in source and binary forms, with or without Modified: trunk/src/workers/OSC/OSC/OscCurrent.py =================================================================== --- trunk/src/workers/OSC/OSC/OscCurrent.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/workers/OSC/OSC/OscCurrent.py 2010-04-06 10:09:55 UTC (rev 674) @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright (c) 2006-2008, Rectorate of the University of Freiburg Modified: trunk/src/workers/OSC/OSC/Smoother.py =================================================================== --- trunk/src/workers/OSC/OSC/Smoother.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/workers/OSC/OSC/Smoother.py 2010-04-06 10:09:55 UTC (rev 674) @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- # Copyright (c) 2007-2008, Rectorate of the University of Freiburg +# Copyright (c) 2009, Andreas W. Liehr (li...@us...) # All rights reserved. 
# # Redistribution and use in source and binary forms, with or without Modified: trunk/src/workers/OSC/OSC/ThicknessModeller.py =================================================================== --- trunk/src/workers/OSC/OSC/ThicknessModeller.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/workers/OSC/OSC/ThicknessModeller.py 2010-04-06 10:09:55 UTC (rev 674) @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- # Copyright (c) 2008, Rectorate of the University of Freiburg +# Copyright (c) 2009, Andreas W. Liehr (li...@us...) # All rights reserved. # # Redistribution and use in source and binary forms, with or without Modified: trunk/src/workers/fmfile/fmfile/FMFLoader.py =================================================================== --- trunk/src/workers/fmfile/fmfile/FMFLoader.py 2010-03-20 21:59:38 UTC (rev 673) +++ trunk/src/workers/fmfile/fmfile/FMFLoader.py 2010-04-06 10:09:55 UTC (rev 674) @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Copyright (c) 2008-2009, Rectorate of the University of Freiburg -# Copyright (c) 2009, Andreas W. Liehr (li...@us...) +# Copyright (c) 2009-2010, Andreas W. Liehr (li...@us...) # All rights reserved. # # Redistribution and use in source and binary forms, with or without @@ -325,7 +325,7 @@ def readSingleFile(b, pixelName): _logger.info(u"Parsing file %s." % pixelName) - preParsedData, d = preParseData(b) + preParsedData, d, FMFversion = preParseData(b) from configobj import ConfigObj,ConfigObjError class FMFConfigObj(ConfigObj): _keyword = re.compile(r'''^ # line start @@ -344,7 +344,7 @@ except ConfigObjError,e: from sys import exit exit('%s\nPlease check the syntax of the FMF-file, in particular the correct usage of comments.' 
% e) - return config2tables(preParsedData, config) + return config2tables(preParsedData, config,FMFversion) def parseBool(value): if value.lower() == 'true': @@ -353,32 +353,34 @@ return False raise AttributeError -_converters = [ - int, - float, - parseBool, - parseVariable, - parseQuantity, - complex, # Complex is checked after variables and quantities, +def getConverters(FMFversion='1.1'): + converters = [ + int, + float, + parseBool, + lambda v: parseVariable(v,FMFversion), + lambda q: parseQuantity(q,FMFversion), + complex, # Complex is checked after variables and quantities, # because 1J is 1 Joule and not an imaginary number. - parse... [truncated message content] |
From: <zk...@us...> - 2010-04-19 18:15:16
|
Revision: 677 http://pyphant.svn.sourceforge.net/pyphant/?rev=677&view=rev Author: zklaus Date: 2010-04-19 18:15:08 +0000 (Mon, 19 Apr 2010) Log Message: ----------- Merge branch 'master' into svn-trunk * master: Removed chaco from visualizers Modified Paths: -------------- trunk/src/pyphant/setup.py trunk/src/workers/ImageProcessing/setup.py trunk/src/workers/OSC/setup.py trunk/src/workers/Statistics/setup.py trunk/src/workers/fmfile/setup.py trunk/src/workers/tools/setup.py Removed Paths: ------------- trunk/src/visualizers/chaco/chaco/ImageVisualizer.py trunk/src/visualizers/chaco/chaco/__init__.py trunk/src/visualizers/chaco/setup.cfg trunk/src/visualizers/chaco/setup.py Modified: trunk/src/pyphant/setup.py =================================================================== --- trunk/src/pyphant/setup.py 2010-04-19 18:06:36 UTC (rev 676) +++ trunk/src/pyphant/setup.py 2010-04-19 18:15:08 UTC (rev 677) @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.5 +#!/usr/bin/env python # -*- coding: utf-8 -*- __id__ = '$Id: $' Deleted: trunk/src/visualizers/chaco/chaco/ImageVisualizer.py =================================================================== --- trunk/src/visualizers/chaco/chaco/ImageVisualizer.py 2010-04-19 18:06:36 UTC (rev 676) +++ trunk/src/visualizers/chaco/chaco/ImageVisualizer.py 2010-04-19 18:15:08 UTC (rev 677) @@ -1,102 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2006-2007, Rectorate of the University of Freiburg -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. 
-# * Neither the name of the Freiburg Materials Research Center, -# University of Freiburg nor the names of its contributors may be used to -# endorse or promote products derived from this software without specific -# prior written permission. -# -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS -# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER -# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -u""" -""" - -__id__ = "$Id$" -__author__ = "$Author$" -__version__ = "$Revision$" -# $Source$ - -import scipy -import wx -from enthought.chaco2 import api as chaco2 -from enthought.chaco2.tools import api as tools -from enthought.enable2.wx_backend import Window -from pyphant.core.Connectors import TYPE_IMAGE -from enthought.chaco2.default_colormaps \ - import color_map_name_dict - -class PlotFrame(wx.Frame): - name = 'New Image Visualizer' - def __init__(self, fieldContainer, *args, **kw): - kw["size"] = (600,600) - wx.Frame.__init__(*(self,None,)+args, **kw) - data = chaco2.ArrayPlotData() - data.set_data('imagedata', fieldContainer.data.data) - plot = chaco2.Plot(data) - plot.img_plot('imagedata', - xbounds=fieldContainer.dimensions[0].data, - ybounds=fieldContainer.dimensions[1].data, - colormap = color_map_name_dict['jet']) - plot.overlays.append(tools.SimpleZoom(plot, tool_mode="box", always_on=True)) - self.plot_window = Window(parent=self, component=plot) - sizer=wx.BoxSizer(wx.HORIZONTAL) - sizer.Add(self.plot_window.control, 1, wx.EXPAND) - self.SetSizer(sizer) - self.SetAutoLayout(True) - self.Show(True) - return - -## class ImageVisualizer(object): -## name='Image Visualizer' -## def __init__(self, fieldContainer): -## self.fieldContainer = fieldContainer -## self.execute() - -## def execute(self): -## pylab.ioff() -## self.figure = pylab.figure() -## xmin=scipy.amin(self.fieldContainer.dimensions[0].data) -## xmax=scipy.amax(self.fieldContainer.dimensions[0].data) -## ymin=scipy.amin(self.fieldContainer.dimensions[1].data) -## ymax=scipy.amax(self.fieldContainer.dimensions[1].data) - -## pylab.imshow(self.fieldContainer.data, extent=(xmin, xmax, ymin, ymax)) -## pylab.xlabel(self.fieldContainer.dimensions[0].label) -## pylab.ylabel(self.fieldContainer.dimensions[1].label) -## pylab.title(self.fieldContainer.longname) - -## class F(pylab.Formatter): -## def __init__(self, container, *args, **kwargs): -## self.container=container -## def 
__call__(self, x, pos=None): -## try: -## return str(x*self.container.unit).replace('mu',r'\textmu{}') -## except IndexError, error: -## return str(x) -## ax=pylab.gca() -## pylab.colorbar(format=F(self.fieldContainer)) -## pylab.ion() -## pylab.show() - - Deleted: trunk/src/visualizers/chaco/chaco/__init__.py =================================================================== --- trunk/src/visualizers/chaco/chaco/__init__.py 2010-04-19 18:06:36 UTC (rev 676) +++ trunk/src/visualizers/chaco/chaco/__init__.py 2010-04-19 18:15:08 UTC (rev 677) @@ -1,39 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2006-2007, Rectorate of the University of Freiburg -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of the Freiburg Materials Research Center, -# University of Freiburg nor the names of its contributors may be used to -# endorse or promote products derived from this software without specific -# prior written permission. -# -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS -# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -# PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER -# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -u""" -""" - -__id__ = "$Id$" -__author__ = "$Author$" -__version__ = "$Revision$" -# $Source$ - Deleted: trunk/src/visualizers/chaco/setup.cfg =================================================================== --- trunk/src/visualizers/chaco/setup.cfg 2010-04-19 18:06:36 UTC (rev 676) +++ trunk/src/visualizers/chaco/setup.cfg 2010-04-19 18:15:08 UTC (rev 677) @@ -1,4 +0,0 @@ -# -*- coding: utf-8 -*- -[egg_info] -tag_build = .dev -tag_svn_revision = 1 Deleted: trunk/src/visualizers/chaco/setup.py =================================================================== --- trunk/src/visualizers/chaco/setup.py 2010-04-19 18:06:36 UTC (rev 676) +++ trunk/src/visualizers/chaco/setup.py 2010-04-19 18:15:08 UTC (rev 677) @@ -1,32 +0,0 @@ -#!/usr/bin/env python2.5 -# -*- coding: utf-8 -*- - -u""" -Pyphant Chaco Visualizers -This package contains visualizers that make use of the enthought.chaco -package. -""" - -__author__ = "Klaus Zimmermann, Andreas W. 
Liehr" - -__id__ = '$Id$' -__revision__ = '$Revision: 25 $' - -VERSION = '0.1' - -from setuptools import setup, find_packages - -setup( - name = "Pyphant Chaco Visualizer", - version = VERSION, - author = __author__, - description = __doc__, - install_requires=['pyphant>=0.4alpha3', - 'enthought.chaco2'], - packages = find_packages(), - entry_points = """ - [pyphant.visualizers] - pil.image = chaco.ImageVisualizer:PlotFrame - """ - ) - Modified: trunk/src/workers/ImageProcessing/setup.py =================================================================== --- trunk/src/workers/ImageProcessing/setup.py 2010-04-19 18:06:36 UTC (rev 676) +++ trunk/src/workers/ImageProcessing/setup.py 2010-04-19 18:15:08 UTC (rev 677) @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.5 +#!/usr/bin/env python # -*- coding: utf-8 -*- """ Modified: trunk/src/workers/OSC/setup.py =================================================================== --- trunk/src/workers/OSC/setup.py 2010-04-19 18:06:36 UTC (rev 676) +++ trunk/src/workers/OSC/setup.py 2010-04-19 18:15:08 UTC (rev 677) @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.5 +#!/usr/bin/env python # -*- coding: utf-8 -*- """ Modified: trunk/src/workers/Statistics/setup.py =================================================================== --- trunk/src/workers/Statistics/setup.py 2010-04-19 18:06:36 UTC (rev 676) +++ trunk/src/workers/Statistics/setup.py 2010-04-19 18:15:08 UTC (rev 677) @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.5 +#!/usr/bin/env python # -*- coding: utf-8 -*- """ Modified: trunk/src/workers/fmfile/setup.py =================================================================== --- trunk/src/workers/fmfile/setup.py 2010-04-19 18:06:36 UTC (rev 676) +++ trunk/src/workers/fmfile/setup.py 2010-04-19 18:15:08 UTC (rev 677) @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.5 +#!/usr/bin/env python # -*- coding: utf-8 -*- """ Modified: trunk/src/workers/tools/setup.py =================================================================== --- 
trunk/src/workers/tools/setup.py 2010-04-19 18:06:36 UTC (rev 676) +++ trunk/src/workers/tools/setup.py 2010-04-19 18:15:08 UTC (rev 677) @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.5 +#!/usr/bin/env python # -*- coding: utf-8 -*- """ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |