diff -Nru jic-13.41.3/config jic-15.01.1/config --- jic-13.41.3/config 1970-01-01 00:00:00.000000000 +0000 +++ jic-15.01.1/config 2015-01-04 21:32:29.000000000 +0000 @@ -0,0 +1,118 @@ +# JIC Configuration File Version 3 +# +# Put this file into ~/.jic/ directory + +o.browser = 'sensible-browser' +o.editor = 'sensible-editor' + +# default caching mode: 'cached', 'offline', 'online' +o.cache.mode = 'online' + +# how long locally cached data is considered to be fresh +o.cache.ttl = 3600 + +# default CLI mode +o.cl.mode = 'plumbing' + +# location for jic files (config, cache, etc) +o.home.location = '~/.jic/' +o.home.mode = 0700 + +# should searches always be done using server in cached mode +o.query.search_online = False + +# the default server to use +o.server = 'default' + +# set to your server +o.servers.default.url = 'https://some.host.org' + +# set to your user name +o.servers.default.user = 'user.name@some.host.org' + +# set to your password or keep commented out to get a prompt +#o.servers.default.password = '' + +# uncomment the items below for OAuth and set the cert to point to the +# private key, then run the following command to perform the OAuth +# dance: +# $ jic servers dance default +#o.servers.default.oauth.cert = '~/your_private_key_for_jic.pem' +#o.servers.default.oauth.secret = '' +#o.servers.default.oauth.token = '' + +# how long locally cached data is considered to be fresh for this +# specific server +o.servers.default.cache.ttl = 7200 + +# point this to a writable directory in path +o.symlink.location = '~/bin/' +o.symlink.mode = 0777 + +# porcelain mode command definitions are below +o.commands.jadd.aliases = 'add,ad,a' +o.commands.jadd.help = 'add issue comments' +o.commands.jadd.plumbing = 'comments add' + +o.commands.jcr.aliases = 'create,creat,crea,cre,cr,c' + +o.commands.jcr.sc.subtask.aliases = 'sub-task,sub,st,s' +o.commands.jcr.sc.subtask.help = 'create a new sub-task' +o.commands.jcr.sc.subtask.plumbing = 'issues create -e -d -T 
Sub-task -L Implements' + +o.commands.jcr.sc.blueprint.aliases = 'blue,blu,bl,bp,b' +o.commands.jcr.sc.blueprint.help = 'create a new blueprint' +o.commands.jcr.sc.blueprint.plumbing = 'issues create -e -d -T Blueprint -L Implements' + +o.commands.jed.aliases = 'edit,edi,ed,e' +o.commands.jed.help = 'edit an existing issue' +o.commands.jed.plumbing = 'issues edit -e' + +o.commands.jedc.aliases = 'editc,edic,edc,ec' +o.commands.jedc.help = 'edit issue comments' +o.commands.jedc.plumbing = 'comments edit' + +o.commands.jdel.aliases = 'delete,delet,dele,del,de,d' +o.commands.jdel.help = 'delete issue comments' +o.commands.jdel.plumbing = 'comments delete' + +o.commands.jsh.aliases = 'show,sho,sh,s' +o.commands.jsh.help = 'show essential issue information' +o.commands.jsh.plumbing = 'issue show -p fields' + +o.commands.jsh.sc.comments.aliases = 'comment,commen,comme,comm,com,cmts,cmt,co,c' +o.commands.jsh.sc.comments.help = 'show issue comments' +o.commands.jsh.sc.comments.plumbing = 'issues show -p comments' + +o.commands.jsh.sc.fields.aliases = 'field,fiel,fie,fld,fi,fl,f' +o.commands.jsh.sc.fields.help = 'show issue fields' +o.commands.jsh.sc.fields.plumbing = 'issues show -p fields' + +o.commands.jsh.sc.history.aliases = 'histor,histo,hist,his,hi,h' +o.commands.jsh.sc.history.help = 'show issue change history' +o.commands.jsh.sc.history.plumbing = 'issues show -p history' + +o.commands.jsh.sc.links.aliases = 'link,lin,lnk,li,ln,l' +o.commands.jsh.sc.links.help = 'show issue links' +o.commands.jsh.sc.links.plumbing = 'issues show -p links' + +o.commands.jsh.sc.worklog.aliases = 'worklo,workl,work,wor,wo,w' +o.commands.jsh.sc.worklog.help = 'show the time logged for the issue' +o.commands.jsh.sc.worklog.plumbing = 'issues show -p worklog' + +o.commands.jsh.sc.all.aliases = 'al,a' +o.commands.jsh.sc.all.help = 'show all information for the issue' +o.commands.jsh.sc.all.plumbing = 'issues show -p all' + +o.commands.jls.aliases = 'list,lis,li,ls,l' +o.commands.jls.help 
= 'list issues reported by or assigned to you' +o.commands.jls.plumbing = 'issues list -f assignee=$me,status],Closed,Resolved[ -f reporter=$me,status],Closed,Resolved[' + +o.commands.jls.sc.assigned.aliases = 'assigne,assign,assig,assi,ass,as,a' +o.commands.jls.sc.assigned.help = 'list issues assigned to you' +o.commands.jls.sc.assigned.plumbing = 'issues list -f assignee=$me' + +o.commands.jls.sc.reported.aliases = 'reporte,report,repor,repo,rep,re,r' +o.commands.jls.sc.reported.help = 'list issues reported by you' +o.commands.jls.sc.reported.plumbing = 'issues list -f reporter=$me' + diff -Nru jic-13.41.3/debian/changelog jic-15.01.1/debian/changelog --- jic-13.41.3/debian/changelog 2014-05-30 07:35:03.000000000 +0000 +++ jic-15.01.1/debian/changelog 2015-01-21 10:17:06.000000000 +0000 @@ -1,3 +1,9 @@ +jic (15.01.1-1) trusty; urgency=medium + + * New upstream release + + -- Fathi Boudra Wed, 21 Jan 2015 12:16:00 +0200 + jic (13.41.3-1) trusty; urgency=low * Initial release diff -Nru jic-13.41.3/debian/control jic-15.01.1/debian/control --- jic-13.41.3/debian/control 2014-05-30 08:36:17.000000000 +0000 +++ jic-15.01.1/debian/control 2015-01-21 10:29:39.000000000 +0000 @@ -2,7 +2,7 @@ Section: python Priority: optional Maintainer: Fathi Boudra -Build-Depends: debhelper (>= 9), python-all (>= 2.6.6-3) +Build-Depends: debhelper (>= 9), dh-python, python-all (>= 2.6.6-3) Standards-Version: 3.9.5 Homepage: https://github.com/ototo/jic diff -Nru jic-13.41.3/debian/docs jic-15.01.1/debian/docs --- jic-13.41.3/debian/docs 2014-05-30 07:34:21.000000000 +0000 +++ jic-15.01.1/debian/docs 2015-01-21 10:26:44.000000000 +0000 @@ -1 +1,3 @@ README +config +docs/command-line-interface diff -Nru jic-13.41.3/debian/manpages jic-15.01.1/debian/manpages --- jic-13.41.3/debian/manpages 1970-01-01 00:00:00.000000000 +0000 +++ jic-15.01.1/debian/manpages 2015-01-21 10:25:04.000000000 +0000 @@ -0,0 +1,2 @@ +man/jic.1 +man/jicml.1 diff -Nru jic-13.41.3/docs/command-line-interface 
jic-15.01.1/docs/command-line-interface --- jic-13.41.3/docs/command-line-interface 1970-01-01 00:00:00.000000000 +0000 +++ jic-15.01.1/docs/command-line-interface 2015-01-04 21:32:29.000000000 +0000 @@ -0,0 +1,2067 @@ +jic CLI (Command Line Interface) Specification +---------------------------------------------- + +1. ABSTRACT + +In order to help jic users learn its interface quickly and +successfully by reusing their existing experience with other engineering +tools, jic needs a well designed command line interface that is +intuitive to use. + +Key qualities of desired CLI: + +1.1. Well-thought-out structure of the command line + + All commands should be structured the same way, switches should have + the same meaning for all commands (when applicable) and different + functional segments should be grouped together for ease of use. + +1.2. Support for terminal text attributes and colors + + When output goes to a terminal, user defined formatting should be + supported (attributes and colors). + +1.3. Well-thought-out use of pipes + + All commands besides their ability to work with terminals, should + also be capable of both - receiving their input from and putting + their output into shell pipes. + +1.4. Well-thought-out use of command line completion + + All commands should be provided with proper command line completion + for those shells that support this feature (bash is to start with). + +1.5. A decent man page + + A decent CLI tool should have decent man documentation. + +This document describes such a CLI. + + +2. 
CLI STRUCTURE + +The structure of command line corresponds to the commonly used one: + + $ jic [] [ [ ...]] + + where: + + + are listed in a separate OPTIONS as well as for each + command further below + + + is one of the following (unambiguous shortened versions + are also accepted): + + equals to "issue" subject of action + + "comment" for dealing with comments + + being one of (non-ambiguous shortened + versions are also accepted): + + "add" for adding a new comment + "delete" for deleting an existing comment + "edit" for editing an existing comment + "reply" for replying to an existing + comment (the quoted text is + inserted for editing) + "show" shows comments as requested + + "configuration" for dealing with configuration settings + + being one of: + + "list" for listing configuration values + "set" for setting configuration values + "show" for showing configuration values + "unset" for resetting configuration + values to their default values + + "issue" for dealing with issues; + this is a default subject of action that + is assumed when omitted + + being one of: + + "clone" for cloning issues + "create" for creating new issues + "delete" for deleting issues + "edit" for editing issues + "fetch" for caching issues locally + "fields" for listing issues' fields + "forget" for removing issues from the + local cache + "list" for listing issues + "link" for linking the issue to another + one + "move" for moving issues between types + and projects + "pull" for refreshing locally cached + issues + "push" for pushing local changes to the + server + "revert" for reverting one or more + changes made to issues + "show" for showing the issue + "status" for showing the status of the + issue (has it been modified + locally and how if so) + "transition" for transitioning issues + into JIRA workflow states + "tree" for showing the issue hierarchy + "unlink" for unlinking the issue from + another ones + + "link" for dealing with links + + being one of: + + "create" for 
creating links between + issues + "delete" for deleting links between + issues + "list" for listing links for issues + + "list" for dealing with issue lists + + being one of: + + "add" for adding issues into the list + "create" for creating a list of issues + "delete" for deleting a list of issues + "edit" for editing a list of issues + "list" for listing the issue lists + "receive" for creating a list of issues + from the information sent by the + "send" command + "remove" for removing issues from the + "send" for emailing a list of issues + "show" for showing the issues from + issue lists + + "report" for dealing with reports + + being one of: + + "create" for creating a report definition + "delete" for deleting a report definition + "edit" for editing a report definition + "generate" for generating a report + "list" for listing reports + + "server" for managing servers + + being one of: + + "add" for adding a server + "dance" for performing an OAuth-dance + "delete" for deleting a server definition + "edit" for editing a server definition + "list" for listing known servers + "select" for selecting a default server + "show" for showing a server definition + + "template" for managing templates + + being one of: + + "create" for creating a template + "delete" for deleting a template + "edit" for editing a template + "list" for listing templates + "show" for showing templates + + "worklog" for managing work logs + + being one of: + + "add" for adding an entry + "delete" for deleting an entry + "edit" for editing an entry + "list" for listing entries + "show" for showing entries + + +2.1. COMMENT COMMANDS + + 2.1.1. 
ADD + + TODO: document + + Examples: + + # add a comment using an editor + $ jic comment add -e CARD-100 + + # add a comment using pipes + $ echo "This takes ~ 1 second" | jic comment add CARD-100 + + # add the same comment to a couple of issues + $ jic comment add CARD-100 CARD-101 < [] + + Creates one or more issues as requested using the information + provided by the means of: + + * interactive jicML editing (-e switch) + + A file with jicML representation of the issues is + created and opened in the configured editor for + editing, parsed and executed afterwards. + + Any valid fields/issues can be added into or deleted + from the jicML file and desired issue field values + provided - all the requested modifications will be + performed in JIRA accordingly. + + If the file has not been changed or got truncated to + zero length, no operations will be performed. + + If there were errors reported by JIRA when trying to + apply the changes, an editor will be presented again + with unsuccessful parts of the change decorated with + inlined error messages to allow the user correcting + those errors or canceling the remainder of the changes. + + * standard input + + - if "-t" switch is specified + + Values are read from the standard input and assigned + to the issue fields according to the order of + mentioning of those fields in the template. + + Data from the standard input is expected to be a + steam of valid jicML values delimited by new line + characters; an empty line is treated as a request to + not update the corresponding field. + + If input provides less values than there are fields + in the template, all the remaining fields are left + intact. + + If input provides more values than needed according + to the template, then all the extra values are + appended to the last field's value. + + If input is a stream of jicML name:value pairs, + value assignment is done as in the case of none of + the switches specified. 
+ + - if "-F" switch is specified + + Values are read from the standard input and assigned + to the issue fields listed by this switch according + to the order of listing. + + Data from the standard input is expected to be a + stream of valid jicML values delimited by new line + characters; an empty line is treated as a request + not to update the corresponding field. + + If input provides less values than there are fields + listed for this switch, all the remaining fields are + assigned with the same value as the last one + received from the input. + + If input provides more values than needed according + to the template, then all the extra values are + appended to the last field's value using space as a + delimiter when needed. + + If input is a stream of jicML name:value pairs, + value assignment is done as in the case of none of + the switches specified. + + - if none of the above are specified + + Fields and their respective values are read from the + standard input and assigned to corresponding issue + fields. + + Data from the standard input is expected to be a + stream of valid jicML name:value pairs. + + If there were errors creating the issues in JIRA, the + command prints out the error messages and returns an + error code (TODO: specify). + + New issues have issue keys that are composed according + to the following template: + + -NEW- + + where + + is defined in JIRA when + corresponding project gets created; + it is taken from the parent issue + + is a one-based counter for all + the new issues defined in a + single jicML data set + + Relevant switches: + + -d + --down + Use children of the parent issue specified by other + means as parents for newly created issues; the number of + issues created for each parent depends on the number of + issue types specified using "-T" switch. 
+ + -D + --down-from + Use children of the issues specified as parents + for newly created issues; the number of issues created + for each parent depends on the number of issue types + specified using "-T" switch. + + -H + --depth + Process as many levels up or down as specified; zero corresponds + only to the issue specified for -u or -d, immediate + parent/children correspond to depth 1, etc. + + -e + --editor + Invoke an editor to interactively edit the jicML + representation of the issues which is then parsed and + executed. + + If the file is not modified or is empty, the operation + is cancelled and no changes are made. + + -f + --filter + Use only the issues matching the criteria as parents + for newly created issues; if multiple such switches are + specified, those criteria are combined using logical + "or" operation. + + -F + --fields + Edit only the issue fields specified. + + Overrides template (default one or the one specified + explicitly using "-t" switch) if specified using -t + switch. + + -k + --keys + Get issue keys for the issues to be processed using the + same method (an editor or standard input) as the one + used for getting all the other data for the operation. + + In this case all the non-empty lines of the input text + until the first empty line are interpreted as containing + a white-space or comma- separated issue keys; keys + listed this way are appended to the keys specified by + other means (arguments or switches). + + -L + --link-type + Link newly created issues to their respective parent + issues using all the link types specified. + + -n + --number + Create of issues of each type specified. + + -o + --online + Perform an online operation (create issues on the + corresponding server as well as the local cache). + + -O + --offline + perform an offline operation (create issues in the local + cache only) + + -S + --server + Use the JIRA server specified (or the corresponding + cached information). 
+ + -t + --template + Create issues according to the template specified. + + -T + --issue-types + Create as many new issues per each parent as the number + of specified issues types - one for each. If this switch + is not used, then the last one used for the same server + is used again. + + -u --up + Process ancestors of the issues specified; the issues + specified is not processed (unless -s is also being + used). + + Examples: + + # create two child issues of type "Sub-task" for CARD-100 + # using an editor, using the default link type to link newly + # created issues to their parent and a default template + $ jic create -e -T Sub-task -n 2 CARD-100 + + # create child issues of type "Sub-task" one for each child + # issue of the CARD-100 linking new issues to them using + # the link type specified while filling in only the Summary + # field + $ echo "Perform the preliminary research" | \ + jic create -d CARD-100 -H 1 -T Sub-task -L implements \ + -F Summary + + # create two child issues of type "Sub-task" under the + # CARD-100 using stdin to set their summaries + $ echo -e "Do the first step\nDo the second step" | \ + jic create CARD-100 -T Sub-task -L implements -F Summary + + + 2.3.3. DELETE + + TODO: document + + Examples: + + # delete the issue specified + $ jic issue delete CARD-100 + + # delete a couple of issues + $ jic delete CARD-100 CARD-101 + + # delete all the issues whose keys are listed in a text file + # this is an irreversible operation - be careful! + $ jic delete -k < massacre-victims + + + 2.3.4. EDIT + + $ jic edit [] + + Edits one or more issues specified using the information + provided by the means of: + + * file with jicML representation of the issues being edited + opened in the configured editor for editing and parsed + afterwards. 
+ + Any valid fields/issues can be added into or deleted + from the file and issue field values changed to have + corresponding issues updated accordingly; in case if a + new issue is added it will be created too (see the + "issue create" command for details). + + If the file has not been changed or got truncated to + zero length, no operations are performed. + + If there were errors reported by JIRA when trying to + apply the changes, an editor is presented with inlined + error messages to allow the user correcting those errors + or canceling the erroneous bits of the change by not + changing the file. + + * standard input + + - if "-t" switch is specified + + Values are read from the standard input and assigned + to the issue fields according to the order of + mentioning of those fields in the template. + + Data from the standard input is expected to be a + steam of valid jicML values delimited by new line + characters; an empty line is treated as a request to + not update the corresponding field. + + If input provides less values than there are fields + in the template, all the remaining fields are left + intact. + + If input provides more values than needed according + to the template, then all the extra values are + appended to the last field's value. + + If input is a stream of jicML name:value pairs, + value assignment is done as in the case of none of + the switches specified. + + - if "-F" switch is specified + + Values are read from the standard input and assigned + to the issue fields listed by this switch according + to the order of listing. + + Data from the standard input is expected to be a + steam of valid jicML values delimited by new line + characters; an empty line is treated as a request + not to update the corresponding field. + + If input provides less values than there are fields + listed for this switch, all the remaining fields are + assigned with the same value as the last one + received from the input. 
+ + If input provides more values than needed according + to the template, then all the extra values are + appended to the last field's value using space as a + delimiter when needed. + + If input is a stream of jicML name:value pairs, + value assignment is done as in the case of none of + the switches specified. + + - if none of the above are specified + + Fields and their respective values are read from the + standard input and assigned to corresponding issue + fields. + + Data from the standard input is expected to be a + steam of valid jicML name:value pairs. + + If there were errors applying the changes in JIRA, the + command prints out the error messages and returns an + error code (TODO: specify). + + Relevant switches: + + -d + --down + Process children of the parent specified by ; + the issue specified is not processed. + + -H + --depth + Process as many levels up or down as specified; zero + corresponds only to the issue specified for -u or -d, + immediate parent/children correspond to depth 1, etc. + + -e + --editor + Invoke an editor to edit the issues specified according + to the template (default one for the issue type or one + specified explicitly using "-t" switch). + + If file is not modified or is empty, the operation is + cancelled. + + -f + --filter + Show only issues matching the criteria; if multiple + such switches specified, those criteria are combined + using logical "or" operation. + + Parent and child issues around those issues which are + not shown according to the criteria specified using this + switch, are shown as connected (with an indication that + there are skipped issues). + + -F + --fields + Edit only the issue fields specified. + + Overrides template (default or the one specified + explicitly using "-t" switch) if specified. + + -k + --keys + Get issue keys to be processed using the same method + (editor or standard input) as used for getting all the + other data for the operation. 
+ + In this case all the non-empty lines of the input until + the first empty line are interpreted as containing a + white-space or comma- separated issue keys; keys listed + this way are appended to the keys specified by other + means (arguments or switches). + + -o + --online + Perform an online operation (update the data on the + corresponding server as well as the local cache). + + -O + --offline + Perform an offline operation (update the data in the + local cache only). + + -S + --server + Use the JIRA server specified (or the corresponding + cached information). + + -t + --template + Edit issues according to the template specified. + + -u + --up + Process ancestors of the issue specified by ; + the issue specified is not processed. + + Examples: + + # update two issues using an editor and a default template + $ jic edit -e CARD-100 CARD-101 + + # add FixVersion/s value for two cards using pipes + # as only one value is provided (single line of text), it is + # used for all the following fields/issues that are missing + # their own values - effectively copying the value + $ echo "+2014.12" | \ + jic edit -F "FixVersion/s" CARD-100 CARD-101 + + + 2.3.5. FETCH + + TODO: document + + Examples: + + # fetch all the issues assigned to a person + $ jic fetch -f "assigned=some.person@host" + + # fetch all the issues assigned to the user with their + # respective dependencies + $ jic fetch -f "assigned=$me" -L depends --up -H 1 + + # fetch specific issues and all their children linked by + # "implements" link type + $ jic fetch --self --down CARD-100,CARD-101 -L implements + + + 2.3.6. FIELDS + + TODO: document + + Examples: + + # show all issue fields + $ jic issue fields CARD-100 + + # show required fields + $ jic issue fields -p required CARD-100 + + + 2.3.7. 
FORGET + + TODO: document + + Examples: + + # forget all the locally cached issues + $ jic issue forget -a + + # forget only children of the issue that are linked using + # "implements" link type + $ jic issue forget -D CARD-100 -L implements + + # forget issues listed in MyList and all their + # "implemented by" children + $ jic issue forget -s -d list:MyList + + + 2.3.8. LIST + + $ jic list [] + + Lists one or more issues specified. + + If there are no issue keys specified (either as + argument or as switches) for the command, it shows nothing. + + Relevant switches: + + -d + --down + Process children of the parent specified by ; + the issue specified is not processed. + + -H + --depth + Process as many levels up or down as specified; zero + corresponds only to the issue specified for -u or -d, + immediate parent/children correspond to depth 1, etc. + + -e + Invoke an editor to get (if none are provided) or edit + and confirm the list of issue keys to process; operation + is cancelled if the file is truncated to zero length. + + -f + --filter + Show only issues matching the criterion; if multiple + such switches specified, those criteria are combined + using logical "and" operation. + + -n + --number + List of issues only. + + -o + --online + Perform an online operation (retrieve the data from the + corresponding server). + + -O + --offline + Perform an offline operation (retrieve the data from the + local cache only). + + -s + --self + Also include the issue mentioned for -u or -d. + + -S + --server + Use the JIRA server specified (or the corresponding + cached information). + + -t + --template + Show issues according to the template specified. + + -u + --up + Process ancestors of the issue specified by ; + the issue specified is not processed. + + + An optional comma- or space- separated list of issue keys; + the output of this command is a combination of issues found + according to the criteria specified by switches above and + ones specified by this argument. 
+ + Examples: + + # list all user's assigned issues of "Blueprint" and + # "Sub-task" issue types; action subject is omitted so + # "issue" is assumed + $ jic list -f "assignee=$me" -T "Blueprint,Sub-task" + + # list all user's authored or assigned issues + $ jic issue list -f "assignee=$me" -f "reporter=$me" + + + 2.3.9. LINK + + TODO: document + + Examples: + + # link CARD-101 to CARD-100 using "implements" link type; + # target hierarchy is: + # CARD-100 + # +- is implemented by: CARD-101 + $ jic link -L Implements CARD-101 CARD-100 + + # alternative to the above + $ jic link -L "Implemented by" CARD-100 CARD-101 + + + 2.3.10. MOVE + + TODO: document + + + 2.3.11. PULL + + TODO: document + + Examples: + + # refresh all the locally cached issues from the server + $ jic pull + + # refresh only specific issues from those cached locally + # ones - assigned to the user ones + $ jic pull -f "assignee=$me" + + + 2.3.12. PUSH + + TODO: document + + Examples: + + # push all the local changes to the server + # as the subject of the action is omitted, "issue" is + # assumed + $ jic push + + # push only changes for the listed issues to the server + $ jic issue push CARD-100 CARD-101 + + # push only last change made to CARD-101 to the server + $ jic issue push CARD-101:-1 + + # push listed changes to the server + $ jic issue push CARD-100:1,2,5 CARD-101:2-4 + + # push changes made to the issues listed in MyList + $ jic issue push list:MyList + + + 2.3.13. REVERT + + TODO: document + + Examples: + + # revert the last change for the issue + $ jic issue revert CARD-100:-1 + + # revert all the changes to the card + $ jic issue revert CARD-101 + + # interactively revert changes to the CARD-100; action + # subject is omitted in this case - "issue" assumed + $ jic revert -i CARD-100 + + + 2.3.14. 
STATUS + + TODO: document + + Examples: + + # show status of all the issues locally modified for JIRA + # server "MyServer"; only those issues which have unpushed + # local changes are shown + $ jic status -S MyServer + + # show status of the CARD-100 and its children + $ jic issue status -s -D CARD-100 + + + 2.3.15. SHOW + + $ jic show [ [...]] + + Shows details for one or more issues specified. + + If equals to "-", standard input is used to get + issue list to process (can be white-space- or comma- separated) + until the EOF. + + If there are no issue keys specified (either as + argument or as switches) for the command, it shows nothing. + + Relevant switches: + + -a + --all + Show all parts of the issues (all fields, links, + comments, history); a synonym to "-p all". + + -d + --down + Process children of the parent specified by ; + the issue specified is not processed. + + -H + --depth + Process as many levels up or down as specified; zero + corresponds only to the issue specified for -u or -d, + immediate parent/children correspond to depth 1, etc. + + -e + --editor + Invoke an editor to get (if none are provided) or edit + and confirm the list of issue keys to process; operation + is cancelled if the file is truncated to zero length. + + -o + --online + Perform an online operation (retrieve the data from the + corresponding server). + + -O + --offline + Perform an offline operation (retrieve the data from the + local cache only). + + -p + --parts + Show all the parts mentioned, options are: + "all", "header", "fields", "comments", "history", + "links". + + -s + --self + Also include the issue mentioned for -u or -d. + + -S + --server + Use the JIRA server specified (or the corresponding + cached information). + + -t + --template + Show issues according to the template specified. + + -u + --up + Process ancestors of the issue specified by ; + the issue specified is not processed. 
+ + Examples: + + # show all parts of an issue + $ jic issue show -a CARD-100 + + # show issue's fields, comments and history + $ jic show -p fields,comments,history CARD-100 + + # show work log in raw format + $ jic show -R -p worklog CARD-100 + + + 2.3.16. TRANSITION + + TODO: document + + Examples: + + # transition the issue into the "In Progress" state + $ jic issue transition CARD-100 "In Progress" + + # transition a set of issues into "Resolved" state with the + # resolution being "Fixed" + $ jic transition CARD-100 Resolved:Fixed + + # transition all the immediate child issues of the issue + # specified interactively using an editor + $ jic transition -D CARD-100 -H 1 -e + + # transition the issue specified and all its children using + # an editor; all possible target states for the issues will + # be listed in jicML file presented for editing + $ jic issue transition -s -d CARD-100 + + + 2.3.17. TREE + + $ jic tree [] + + Shows hierarchy that surrounds the issues specified. + + If there are no issue keys specified (either as + argument or as switches) for the command, it shows nothing. + + Relevant switches: + + -d + --down + Process children of the parents specified. + + -H + --depth + Process as many levels up or down as specified; zero + corresponds only to the issue specified for -u or -d, + immediate parent/children correspond to depth 1, etc. + + -e + --editor + Invoke an editor to get (if none are provided) or edit + and confirm the list of issue keys to process; operation + is cancelled if the file is truncated to zero length. + + -f + --filter + Show only issues matching the criterion; if multiple + such switches specified, those criteria are combined + using logical "and" operation. + + Parent and child issues around those issues which are + not shown (according to the criteria specified using + this switch), still are shown as connected (with an + indication that there are skipped issues). 
+ + -o + --online + Perform an online operation (retrieve the data from the + corresponding server). + + -O + --offline + Perform an offline operation (retrieve the data from the + local cache only). + + -S + --server + Use the JIRA server specified (or the corresponding + cached information). + + -t + --template + Show issues according to the template specified. + + -u + --up + Process ancestors of the issue specified. + + + An optional comma- or space- separated list of issue keys; + the output of this command is a combination of issues found + according to the criteria specified by switches above and + ones specified by this argument. + + Examples: + + # show "implements" tree for the issue + $ jic tree -u -d -L implements CARD-100 + + # show dependencies tree for the issue + $ jic tree -a -L depends CARD-100 + + # show the full hierarchy for the issue (all link types) + $ jic tree -a CARD-100 + + + 2.3.18. UNLINK + + TODO: document + + Examples: + + # Example hierarchy: + # CARD-100 + # | + # +-- implemented by: CARD-101 + # +-- depends on: CARD-101 + + # remove all links between CARD-100 and CARD-101 + $ jic issue unlink CARD-100 CARD-101 + + # remove only dependency between the issues + $ jic unlink -L depends CARD-100 CARD-101 + + +2.4. LINK COMMANDS + + 2.4.1. CREATE + + TODO: document + + Examples: + + # link two issues with "depends" link so that CARD-100 would + # depend on CARD-101 giving the following hierarchy: + # CARD-100 + # | + # *-- depends on: CARD-101 + $ jic link create -L depends CARD-101 CARD-100 + + # link CARD-101/102/103 as implementing CARD-100 giving the + # following hierarchy: + # CARD-100 + # | + # *-- is implemented by: CARD-101 + # *-- is implemented by: CARD-102 + # *-- is implemented by: CARD-103 + $ jic link create -L implements \ + CARD-101,CARD-102,CARD-103 CARD-100 + + + 2.4.2. 
DELETE + + TODO: document + + Examples: + + # delete all links for the issue + $ jic link delete -a + + # delete "depends" links to the issues that are depending on + # the specified one + $ jic link delete --down -L depends CARD-100 + + # delete "depends" links to the issues the specified issue + # is depending on + $ jic link delete --up -L depends CARD-101 + + + 2.4.3. LIST + + TODO: document + + Examples: + + # list all links for the issue + $ jic link list CARD-100 + + # list upward (towards parent) links including the card + # itself + $ jic link list -s -u CARD-100 + + +2.5. LIST COMMANDS + + 2.5.1. ADD + + TODO: document + + Examples: + + # Add a couple of issues into the issue list named "MyList" + # without comments (comments are to be added later by + # "list edit" command + $ jic list add MyList CARD-100 CARD-101 + + # Add two issues into the issue list named "MyList" together + # with their respective comments + $ jic list add MyList < + --down-from + Process children of the parents specified by ; + the issue specified is not processed. + + is a comma-separated list of issue keys. + + Unless there are link types specified by the "-L" switch, all + link types are included in the traversal operation. + + Down means the following for different link types: + - implements: from the implemented issue to its implementors; + - depends: from the dependee to its dependants; + - clones: from the original to its clone. + + Examples: + + # list the CARD-100 issue and all its children + $ jic list --self --down CARD-100 + + # show all children for KEY-123 and KEY-124 + $ jic show -d KEY-123 KEY-124 + + -e + --editor + Invoke an editor to get (if none are provided) or edit + and confirm the list of issue keys to process; operation + is cancelled if the file is truncated to zero length. 
+ + Specific semantics of this action and type of the information + that is being edited, as well as the reaction on all three + outcomes of editing operation (those being: unmodified file, + modified file, empty file) depend on the subject of action and + an action - see those for details. + + -f + --filter + Show only issues matching the criteria; if multiple such + switches specified, those criteria are combined using logical + "or" operation. + + is a coma-separated list of , which + are combined using logical "and" + + Criterion: + "" + + where + corresponds to JIRA field name + is one of the: + "<" for "less than", + "<=" for "less or equal than", + "=" for "equals", + ">" for "more than", + ">=" for "more or equal than", + "!=" for "not equal", + "[" for "in the list", list follows with the + first symbol used as a delimiter; the list + may be closed by an optional "]" symbol + "]" for "not in the list", list follows with the + first symbol used as a delimiter; the list + may be closed by an optional "[" symbol + is anything following the the end of + the string (with an exception of optional + trailing "]" and "[" if present) + + Examples: + -f "assignee=some.user@some.host" + -f "project=Some Project" + -f "fixVersion[,2014.01,2014.02]" + -f "assignee],someone@host,anotherone@host[" + + -F + --fields + Process only the issue fields specified. + + Overrides template (default or the one specified explicitly + using "-t" switch) if specified. + + -H + --depth + Process as many levels up or down as specified; zero corresponds + only to the issue specified for -u/U or -d/D, immediate + parent/children correspond to depth 1, etc. + + Example: + -H 2 + --depth 1 + + -i + --interactive + Perform operation interactively, confirming every step. + + -j + --json + Work with JSON representation of the data. + + -k + --keys + Get issue keys to be processed using the same method (editor or + standard input) as used for getting all the other data for the + operation. 
+ + -L + --link-type + Provide one or more link types (as a comma-separated list) to + work with; specifics of semantics of this switch depend on the + specific subject of action and an action - see those for + details. + + -m + --message + Provide a message that would be expected from the stdin + otherwise; new lines can be added using "\n". + + -n + --number + Process as many items as specified (e.g. when creating issues). + + -o + --online + Perform an online operation (update the data on the + corresponding server as well as the local cache). + + -O + --offline + Perform an offline operation (update the data in the local cache + only). + + -p + --parts + Show all mentioned parts, options are: + "all", "header", "fields", "comments", "history", "links". + + -P [] + --purge [] + Purge existing information and replace it with the one provided; + specifics of semantics of this switch depend on specific subject + of action and an action - see those for details. + + is a comma-separated list if unique identifiers of + the items to be purged; all items are purged if no argument + is provided. + + -q + --query + Get the list of issues to process by performing the JQL + specified. + + -Q + --query-stdin + Get the list of issues to process by performing the JQL + provided in stdin. + + -r + --raw + Output raw results of the command; typically useful for using in + automation; specifics of semantics of this switch depend on + specific subject of action and an action - see those for + details. + + -R + --range + Specifies which items from the result set should be processed; + specifics of semantics of this switch depend on specific subject + of action and an action - see those for details. + + -s + --self + Also include the issue mentioned for -u or -d. + + -S + --server + Use the JIRA server specified (or the corresponding + cached information). + + -t + --template + Show issues according to the template specified. + + Templates are managed using template commands. 
+ + -T + --issue-types + Provide one or more issue types (as a comma-separated list) to + work with; specifics of semantics of this switch depend on the + specific subject of action and an action - see those for + details. If this switch is not used, then the last one used for + the same server is used again. + + -u + --up + Process ancestors of the issues denoted by other means but not + those issues themselves unless there is a "-s" switch specified. + + Unless there are link types specified by the "-L" switch, all + link types are included in the traversal operation. + + Up means the following for different link types: + - implements: from implementor to its implemented issue; + - depends: from the dependant to its dependee; + - clones: from the the clone to its original. + + Examples: + + # list the CARD-100 issue and all its ancestors + $ jic list --self --up CARD-100 + + # show all ancestors for KEY-123 and KEY-124 + $ jic show -u KEY-123 KEY-124 + + -U + --up-from + Process ancestors of the issue specified by ; + the issue specified is not processed unless the "-s" switch is + specified too. + + is a comma-separated list of issue keys + + Unless there are link types specified by the "-L" switch, all + link types are included in the traversal operation. + + Up means the following for different link types: + - implements: from implementor to its implemented issue; + - depends: from the dependant to its dependee; + - clones: from the the clone to its original. + + Examples: + + # list ancestors of the issue KEY-123 but not the issue + $ jic list --up KEY-123 + + # list ancestors for KEY-123 and KEY-124 together with + # the issues + $ jic list -s -u KEY-123,KEY-124 + + -v + --verbose + Show verbose messages about the actions being performed. + + -V + --version + Show jic's version information. + + +4. jicML + +jicML is a text based data markup language used to create and modify +JIRA issues. It is a lightweight, intuitive and fast to learn. 
It also +helps minimizing the amount of data duplication when editing multiple +issues. + + 4.1. jicML representation of JIRA issues + + General structure of the jicML representation of JIRA issues has + one optional leading section (shared fields) and repeated pair of + sections (issue key and issue fields with their respective walues) + + -------- 8< -------- 8< -------- 8< -------- 8< -------- + + # this and the next line can be repeated + + -------- 8< -------- 8< -------- 8< -------- 8< -------- + + where <* fields and values> are: + + : + + where + + is imposed by JIRA but in any case can't contain + the ":" symbol + + is explained further below + + Having a allows setting the same value + for a field in all the issues that have their keys mentioned further + in the file. For example, if one would like to set the + "FixVersion/s" and add the same comment for a set of known issues, + besides performing other modifications that are unique for each + issue, a jicML for such an editing operation might look like: + + -------- 8< -------- 8< -------- 8< -------- 8< -------- + FixVersion/s: 2020.01 + Comment: + Moving into the future as the whole team is having a long + sabbatical leave. + + Issue: CARD-101 + Priority: Low + + Issue: CARD-102 + Labels: +MY_LABEL + -------- 8< -------- 8< -------- 8< -------- 8< -------- + + All the fields and their values that are mentioned after an "Issue:" + field and until the next "Issue:" field or the end of the data, are + related to the issue which key was mentioned in the previous + "Issue:" field. Thus, for the example jicML above, the issues + mentioned in it would get the following updates: + + CARD-101 + FixVersion/s: 2020.01 + Comment: + Moving into the future as the whole team is having a + long sabbatical leave. + Priority: Low + + CARD-102 + FixVersion/s: 2020.01 + Comment: + Moving into the future as the whole team is having a + long sabbatical leave. + Labels: +MY_LABEL + + + 4.2. 
jicML values + + Values in jicML can be represented in three ways: + + - a simple single line of text + + The end of line character is denoting the end of line and is not + included into the value; it is possible to add line breaks using + "\n" combination though: + + examples: + -------- 8< -------- 8< -------- 8< -------- 8< -------- + Field: 12 + Another Field: Some text with spaces + Yet Another Field: Some text with spaces\nand newlines + And Yet Another Field: +A_VALUE + -------- 8< -------- 8< -------- 8< -------- 8< -------- + + - a folded multi-line text + + This is similar folding used for long email headers as defined + in rfc2822 - a long sequence of characters (including + whitespace) is folded with all but the first line having one or + more spaces as their first symbols; when such a value is parsed, + all the lines until the first one with non-whitespace first + character are joined together using one space as a delimiter + + all the new line characters are treated as a whitespace within + the folded value and are replaced with spaces + + Examples: + -------- 8< -------- 8< -------- 8< -------- 8< -------- + Field: + 12 + Another Field: Some text + with spaces + Yet Another Field: + Some text with spaces\n + and newlines\n + consisting of three + lines of text + And Yet Another Field: + +NEW_VALUE, + +ANOTHER_NEW_VALUE, + -OLD_VALUE + -------- 8< -------- 8< -------- 8< -------- 8< -------- + + - a multi-line value within the "{{{" and "}}}" value markers + + In this case, anything that is located between the markers is + treated as the value with just the following exceptions: + + If value starts with whitespace, such whitespace is stripped + away - the following value + + -------- 8< -------- 8< -------- 8< -------- 8< -------- + {{{ + Some text}}} + -------- 8< -------- 8< -------- 8< -------- 8< -------- + + is equal to + + -------- 8< -------- 8< -------- 8< -------- 8< -------- + {{{Some text}}} + -------- 8< -------- 8< -------- 8< -------- 8< 
-------- + + A simple text will be parsed as a stream of jicML values - one per + line - unless there are lines with leading spaces and some trailing + non-whitespace characters, when value unfolding will be performing, + consuming multiple whitespace prefixed lines into just one value + + + 4.3. Stream of jicML name:value pairs + + For the cases when jic is expecting the user to provide new values + for the fields of the issues that are being created or edited, in + some cases it expects a stream of jicML name:value pairs. The format + of the stream is simple in this case: + + : + ... + + EOF denotes the end of the data. + + Example: + + -------- 8< -------- 8< -------- 8< -------- 8< -------- + Field name 1: This is a first textual value. + Field name 2: This is a second textual value, which is quite long and + is folded using leading space. + Field name 3: {{{ + This is a third value, + which is wrapped into the value markers + and includes new line characters.}}} + -------- 8< -------- 8< -------- 8< -------- 8< -------- + + + 4.4. Stream of jicML values + + For the cases when jic is expecting the user to provide new values + for the fields of the issues that are being created or edited, in + some cases it expects a stream of jicML values. The format of the + stream is simple in this case: + + + ... + + EOF denotes the end of the data. + + Example: + + -------- 8< -------- 8< -------- 8< -------- 8< -------- + This is a first textual value. + This is a second textual value, which is quite long and + is folded using leading space. + {{{ + This is a third value, + which is wrapped into the value markers + and includes new line characters.}}} + -------- 8< -------- 8< -------- 8< -------- 8< -------- + + + 4.3. Representing JIRA comments + + TODO: document + + + 4.4. Representing JIRA links + + TODO: document + + +5. SHELL ENVIRONMENT INTEGRATION + + In order to provide more flexibility for its users jic also supports + shell environment variables. 
Using specially named variables it is + possible to provide values for jic options: + + JIC_O_ + + Examples: + + $ JIC_O_FILTER="project=CARD" jic list -f "assignee=$me" + + + It is also possible to provide field values for editing operations + (creation, modification) through environment variables using the + following name pattern: + + JIC_F_ + + Please note, all the non-alphanumerical characters should be + replaced with underscore characters ("_"). + + Examples: + + $ JIC_F_REPORTER="another.person@host" jic create -e + + TODO: document + + +6. SHELL COMMAND LINE COMPLETION + + To allow a decent level of integration into the command shell, jic + is providing few levels of command line completion: + - subject of action completion; + - action completion; + - issue key completion (using the local cache and MRU list); + - comment ID completion (using the local cache and MRU list); + - issue and link type completion (using the local cache and MRU + list); + - list and template name completion. + + TODO: document diff -Nru jic-13.41.3/INSTALL jic-15.01.1/INSTALL --- jic-13.41.3/INSTALL 2014-03-19 17:52:36.000000000 +0000 +++ jic-15.01.1/INSTALL 2015-01-04 21:32:29.000000000 +0000 @@ -22,12 +22,26 @@ http://www.pip-installer.org/en/latest/installing.html -3. install jira.python +3. install oauth2 (needed for OAuth) + + $ sudo apt-get install python-oauth2 + + OR + + $ sudo pip install oauth2 + + +4. install requests-oauthlib (needed for OAuth) + + $ sudo pip install requsts-oauthlib + + +5. install jira.python $ sudo pip install jira-python -4. install pycrypto (needed for OAuth) +6. install pycrypto (needed for OAuth) $ sudo pip install pycrypto @@ -51,16 +65,18 @@ # rm -Irf ./tmp -5. install oauth2 (needed for OAuth) +7. 
optional - install keyring module - $ sudo apt-get install python-oauth2 + If you would like to use your keyring manager to store your JIRA + password intead of using OAuth or storing it in config in plain text + format, you may want to install `keyring` module. If that module is + available on the system and OAuth in not configured, jic will use + keyring store to store passwords. - OR - - $ sudo pip install oauth2 + $ sudo pip install keyring -6. optional - install python-magic +8. optional - install python-magic If you're getting the following error @@ -86,6 +102,14 @@ $ sudo pip install python-magic +9. Put `config` file into ~/.jic/ + + +10. Create symlinks for the porcelain mode commands + + $ jic commands symlink + -On the first attempt to run jic it will generate a default configuration -file at ~/.jicrc and will try to suggest (or even help) you to edit it. +On the first attempt to run jic it will try to import the existing +config at ~/.jicrc. Please use the example config provided to create +your own config. 
diff -Nru jic-13.41.3/jic jic-15.01.1/jic --- jic-13.41.3/jic 2014-03-19 17:52:36.000000000 +0000 +++ jic-15.01.1/jic 2015-01-04 21:32:29.000000000 +0000 @@ -8,1469 +8,6975 @@ # License: GPLv2 # +# Backlog: +# [ ] add URL to every fetched issue +# [ ] matching field names for flter for online/offline + from __future__ import print_function -from sys import argv, exit, stdout, stderr -from signal import signal, SIGINT +import sys +import argparse +import signal from os.path import expanduser, expandvars, isfile import datetime from tempfile import NamedTemporaryFile import re -from os import remove +import os from getpass import getpass import subprocess +import shlex import base64 import urlparse +import textwrap +import difflib from tlslite.utils import keyfactory import oauth2 as oauth +import datetime +import json +import errno +import traceback +from collections import OrderedDict try: from jira.client import JIRA from jira.exceptions import JIRAError - from jira.resources import Comment + from jira.resources import Comment, Component, Issue, IssueType, \ + Project, Resolution, Status, User, \ + Version, Worklog except ImportError: print('Could not find jira-python module. Please install it as '\ 'described in INSTALL file. 
Aborting.') - exit(1) + sys.exit(1) -__version__ = '13.41.3' +__version__ = '15.01.1' -OAUTH_APPLICATION_KEY='jic-tool' -DEFAULT_CONFIG_FILE='~/.jicrc' -ASK_FOR_VALUE='' +JIC_PROGRAM_NAME = 'jic' +# Verbosity levels +VERBOSITY_QUIET = 0 +VERBOSITY_ERRORS = 1 +VERBOSITY_WARNINGS = 2 +VERBOSITY_INFO = 3 -class Configuration (object): - """Persistent configuration object.""" - version = '2' - supported_versions = {} # initialized after the class definition - CONFIG_FILE_SIGNATURE = '# JIC Configuration File Version ' +class Util (object): - internal_names = ( - '__options', 'ask_for_value', 'file_exists', - 'generate_file', 'read_from_file', 'edit_file', - 'parse_config_v1', 'parse_config_v1') - - DEFVAL = 0 - SECRET = 1 - AUTOGENERATE = 2 - DESCRIPTION = 3 - - option_definitions = { - # - 'config': ( DEFAULT_CONFIG_FILE, False, False, - 'Configuration file.'), - 'browser': ( 'sensible-browser', False, True, - 'Browser to use when opening issues'), - 'depth': ( 1, False, True, - 'Tree traversal depth for tree related ' - 'operations.'), - 'server': ( ASK_FOR_VALUE, False, True, - 'JIRA server URL.'), - 'user': ( ASK_FOR_VALUE, False, True, - 'JIRA user to use.'), - 'password': ( ASK_FOR_VALUE, True, True, - 'JIRA password to use'), - 'oauth_pair': ( None, True, True, - 'OAuth credentials in the form "token:secret"'\ - ',\nforces application to use OAuth when '\ - 'present'), - 'oauth_cert': ( None, True, True, - 'OAuth private key file (the one that '\ - 'corresponds to the\npublic key used '\ - 'to register JIC in JIRA'), - 'editor': ( 'sensible-editor', False, True, - 'Text editor to be used when needed'), - 'tree_chars': ( '─│<>^v┌┬┐├┼┤└┴┘', False, False, - 'Characters to be used when drawing '\ - 'hierarchies.\nCharacters are: horizontal '\ - 'line, vertical line, left arrow, right '\ - 'arrow,\nup arrow, down arrow, top-left '\ - 'corner, top cross, tor-right corner, left\n'\ - 'cross, middle cross, right cross, '\ - 'bottom-left corner, bottom cross,\n'\ - 
'bottom-right corner.'), - } + DIRECTION_UP = u'up' + DIRECTION_DOWN = u'down' + DIRECTION_BOTH = u'both' + DIRECTIONS = (DIRECTION_UP, DIRECTION_DOWN, DIRECTION_BOTH) - def __init__(self): - object.__setattr__(self, '__options', {}) - keys = sorted(Configuration.option_definitions.keys()) - for key in keys: - object.__getattribute__(self, '__options')[key] = \ - Configuration.option_definitions[key][ - Configuration.DEFVAL] - - def file_exists(self, name=None): - """Checks if config file exists""" - file_name = name if name else DEFAULT_CONFIG_FILE - file_name = expanduser(expandvars(file_name)) - return isfile(file_name) + # regexp that matches JIRA issue keys + re_issue_key = re.compile(r'''^\w+-\d+$''') + + # regexp that matches new (not yet created) issue keys + re_new_issue_key = re.compile(r'''^NEW-\w+-\d+$''') + + # regexp to match JIRA comment ids (with a leading issue key or not) + re_comment_id = re.compile(r'''(?:\w+-\d+:)?\d+''') + + + @staticmethod + def is_issue_key(string): + return Util.re_issue_key.match(string) is not None + + + @staticmethod + def is_new_issue_key(string): + return Util.re_new_issue_key.match(string) is not None + + + @staticmethod + def is_comment_id(string): + return Util.re_comment_id.match(string) is not None + + + @staticmethod + def to_int(value): + if not value: + return None + + vt = type(value) + + if vt is int: + return value + + if vt is long: + return int(value) + + if vt in (str, unicode): + try: + value = value.strip().lower() + vl = len(value) + if value[0] == u'0': + if vl == 1: + return 0 + elif value[1] == u'x': + if vl == 2: + return None + else: + return int(value[2:], 16) + elif value[1] == u'o': + if vl == 2: + return None + else: + return int(value[2:], 8) + elif value[1] == u'b': + if vl == 2: + return None + else: + return int(value[2:], 2) + else: + return int(value, 8) + else: + return int(value) + except Exception: + return None + + + @staticmethod + def ensure_dir_access(directory, access, mode): - 
def read_from_file(self, name=None): - """Reads configuration from a v2 configuration file.""" - opts = object.__getattribute__(self, '__options') try: - version = None - file_name = name if name else DEFAULT_CONFIG_FILE - file_name = expanduser(expandvars(file_name)) - f = open(file_name, 'r') - config_text = f.read() - f.close() - # check version - if not config_text.startswith( - Configuration.CONFIG_FILE_SIGNATURE): - raise IOError( - 'Configuration file %s has no signature ' - 'string. Aborting.' % file_name) + os.makedirs(directory, mode) + except OSError, e: + if e.errno != errno.EEXIST: + return e, False + + if not os.access(directory, access): + return True, False + + return True, True + + + @staticmethod + def get_nested_value(container, path, default=None): + current = container + while path: + if current is None: + return default + + item, _, path = path.partition(u'.') + if not path: + break + + try: + if hasattr(current, '__dict__') and item in current.__dict__: + current = current.__dict__.get(item) + elif item in current: + current = current.get(item) + else: + return default + except Exception: + return default + + try: + if hasattr(current, '__dict__') and item in current.__dict__: + return current.__dict__.get(item) else: - version = \ - config_text[ \ - len(Configuration.CONFIG_FILE_SIGNATURE):]\ - .split()[0] - if version not in Configuration.supported_versions: - raise IOError( - 'Config %s has version %s instead of ' - 'expected %s. Aborting.' 
% ( - file_name, version, - Configuration.version)) - # call appropriate config parser - if Configuration.supported_versions[version]\ - (self, file_name, config_text): - exit(2) - except IOError, e: - pr(unicode(e)) - exit(2) + return current.get(item) + except Exception: + return default + + + @staticmethod + def get_issue_field_value(issue, field_name, return_object=False): + # TODO: optimize + if not issue: + return None + + value = Util.get_nested_value(issue, field_name) + + if value is None: + value = Util.get_nested_value(issue, 'fields.%s' % field_name) + + if field_name.endswith('updated') \ + or field_name.endswith('created') \ + or field_name.endswith('date'): + value = Util.parse_jira_date(value) + elif not return_object: + tmp = Util.get_nested_value(issue, 'fields.%s.name' % field_name) + if tmp is not None: + value = tmp - # validate/convert configuration data - tree_chars = opts.get('tree_chars', - Configuration.option_definitions['tree_chars'][ - Configuration.DEFVAL]) - if type(tree_chars) != unicode: - tree_chars = tree_chars.decode('utf-8') - opts['tree_chars'] = tree_chars + return value - if opts['oauth_pair'] is not None: - opts['password'] = None - if opts['oauth_cert'] is not None: - try: - f = open(expanduser(expandvars(opts['oauth_cert']))) - key = f.read().strip() - opts['oauth_cert'] = key - except Exception, e: - pre(u'Failed to read private key from %s: %s\n'\ - u'OAuth is not available' % \ - (opts['oauth_cert'], unicode(e))) - exit(1) + @staticmethod + def is_stale(ttl, retrieved_at): + if not retrieved_at: + return True + ts_now = datetime.datetime.utcnow() + age = ts_now - retrieved_at + if ttl < age.total_seconds(): + return True + else: + return False - def parse_config_v1(self, file_name, config_text): - """Parses config and populates __options""" - lineno = 0 - for line in config_text.splitlines(False): - lineno += 1 - stripped = line.strip() - if stripped.startswith('#') or not len(stripped): - continue - parts = 
stripped.partition('=') - if parts[1] != '=': - pr('%s: line %d: invalid syntax: %s' % \ - (file_name, lineno, line)) - continue - option = parts[0].strip() - value_part = parts[2].strip() - if value_part.startswith('"') or \ - value_part.startswith("'"): - escape = False - value = '' - for char in value_part[1:]: - if escape: - value += char - escape = False + @staticmethod + def unwrap_list_of_lists(val): + if type(val) not in (list,tuple): + return [val,] + + result = [] + for chop in val: + chop = chop[0] if type(chop) in (list,tuple) else chop + for item in chop.split(','): + result.append(item.strip()) + + return result + + + @staticmethod + def generate_sorting_key(field_name): + def anonymous(issue): + return Util.get_issue_field_value(issue, field_name) + return anonymous + + + @staticmethod + def sort_issues(issues, criteria): + if criteria is None: + return issues + # Python sorted() is a stable sort + for criterion in reversed(criteria): + criterion = criterion.strip() + reverse = (criterion[-1] == u'-') + criterion = criterion[:-1] \ + if criterion[-1] in u'-+' \ + else criterion + fn = Util.generate_sorting_key(criterion) + issues = sorted(issues, key=fn, reverse=reverse) + return issues + + + @staticmethod + def get_jic_file(): + running_script = os.path.abspath(sys.argv[0]) + if os.path.islink(running_script): + running_script = os.path.realpath(running_script) + return running_script + + + @staticmethod + def get_jic_location(): + return os.path.dirname(Util.get_jic_file()) + os.sep + + + FILTER_OP_CHARS = '=~!<>[]' + FILTER_OP_CHARS_EX = FILTER_OP_CHARS + '\t ' + FILTER_CRITERION_DELIMITER = ',' + FILTER_VALUE_DELIMITER = ',' + + @staticmethod + def parse_filter_criterion(filter_criteria): + """Parses -f switch payload. + + The filter expression can contain one or more sub-expressions, which + are delimited by a comma. 
The format of sub-expression is: + + where is one of '=', '!=', '<', '<=', '>', '>=' + + Sub expression can also be: + + + where and are one of '[', ']' + and is a coma-separated list ov strings + + The function returns a tuple of the following structure: + #0 - error message or None + #1 - list of sub expressions, each of which is a tuple: + #0 - name + #1 - operation + #2 - value or list of values depending on operation + """ + + if not filter_criteria: + return (None, ()) + + #filter_criteria = filter_criteria.strip() + + state = 'name' + name = '' + op = '' + value = '' + values = [] + sub_expressions = [] + list_delimiter = '' + total_chars = len(filter_criteria) + + for pos, char in enumerate(filter_criteria, 1): + + if state == 'name': + if char in Util.FILTER_OP_CHARS: + name = name.strip() + state = 'op' + # fall through + elif char != Util.FILTER_CRITERION_DELIMITER: + name += char + if pos == total_chars and len(name.strip()): + return ('missing operator after "%s"' % name, + sub_expressions) + continue + + if state == 'op': + if char == '[': + op = char + state = 'in-list' + if pos == total_chars: + return ('missing value after "%s"' % op, + sub_expressions) + continue + elif char == ']': + op = char + state = 'not-in-list' + if pos == total_chars: + return ('missing value after "%s"' % op, + sub_expressions) + continue + elif char not in Util.FILTER_OP_CHARS_EX: + op = op.replace(' ','').replace('\t','') + if not op: + return ('missing value after "%s"' % name, + sub_expressions) + state = 'value' + # fall through + else: + op += char + if pos == total_chars: + return ('missing value after "%s"' % op, + sub_expressions) + continue + + if state == 'value': + if char == Util.FILTER_CRITERION_DELIMITER: + value = value.strip() + if not value: + return ('missing value after "%s"' % op, + sub_expressions) + sub_expressions.append((name, op, value)) + name = '' + op = '' + value = '' + values = [] + list_delimiter = '' + state = 'name' + continue + else: + 
value += char + if pos == total_chars: + value = value.strip() + if not value: + return ('missing value after "%s"' % op, + sub_expressions) + sub_expressions.append((name, op, value)) + name = '' + op = '' + value = '' + values = [] + list_delimiter = '' + state = 'name' + continue + + if state == 'in-list': + if char == ']': + return ('empty list of values for "%s"' % name.strip(), + sub_expressions) + elif not char.isspace(): + list_delimiter = char + if pos == total_chars: + return ('unclosed list of values for "%s"' % name.strip(), + sub_expressions) + state = 'in-list-value' + continue + + if state == 'in-list-value': + if char == list_delimiter: + value = value.strip() + if value: + values.append(value) + value = '' + if pos == total_chars: + return ('unclosed list of values for "%s"' % name.strip(), + sub_expressions) continue - if char == '\\': - escape = True + elif char == ']': + value = value.strip() + if value: + values.append(value) + value = '' + sub_expressions.append((name, 'in', values)) + name = '' + op = '' + value = '' + values = [] + list_delimiter = '' + state = 'name' + continue + else: + value += char + if pos == total_chars: + return ('unclosed list of values for "%s"' % name.strip(), + sub_expressions) + continue + + if state == 'not-in-list': + if char == '[': + return ('empty list of values for "%s"' % name.strip(), + sub_expressions) + elif not char.isspace(): + list_delimiter = char + if pos == total_chars: + return ('unclosed list of values for "%s"' % name.strip(), + sub_expressions) + state = 'not-in-list-value' + continue + + if state == 'not-in-list-value': + if char == list_delimiter: + value = value.strip() + if value: + values.append(value) + value = '' + if pos == total_chars: + return ('unclosed list of values for "%s"' % name.strip(), + sub_expressions) continue - if char == value_part[0]: - break + elif char == '[': + value = value.strip() + if value: + values.append(value) + value = '' + sub_expressions.append((name, 'not 
in', values)) + name = '' + op = '' + value = '' + values = [] + list_delimiter = '' + state = 'name' + continue + else: value += char + if pos == total_chars: + return ('unclosed list of values for "%s"' % name.strip(), + sub_expressions) + continue + + return (None, sub_expressions) + + + @staticmethod + def generate_issue_diff_job(original, updated, field_meta): + batch_job = [] + updated_issues = {} + for issue in updated: + updated_issues[issue['key']] = issue + + for o_issue in original: + issue_job = {} + if o_issue.key not in updated_issues: + continue + + for name, o_value in o_issue.fields.__dict__.iteritems(): + u_value = updated.fields.__dict__.get(name) + if not Util.are_field_values_equal( + o_value, u_value, field_meta): + field_job = Util.generate_field_diff_job( + name, o_value, u_value, + field_meta) + issue_job[field_job[0]] = field_job[1] + if issue_job: + batch_job.append((o_issue.key, issue_job)) + + return batch_job + + + @staticmethod + def generate_field_diff_job(name, original_value, updated_value, + field_meta): + # TODO: implement + return (name, updated_value) + + + @staticmethod + def are_field_values_equal(lhs, rhs, field_meta): + # TODO: implement + return False + + + @staticmethod + def parse_filter_criteria(filter_criteria): + if filter_criteria is None: + return None + + or_groups = [] + for or_group in filter_criteria: + error, criteria = \ + Util.parse_filter_criterion(or_group) + if error: + print('Error parsing filter "%s": %s' % \ + (or_group[0], error)) + return None else: - value = value_part - object.__getattribute__(self, '__options')[option] = value - return 0 + # build a sub-tree + subtree = criteria[-1] + for item in reversed(criteria[:-1]): + subtree = (item, 'and', subtree) + or_groups.append(subtree) + + #build an AST now + result = or_groups[-1] if len(or_groups) else None + for or_group in reversed(or_groups[:-1]): + result = (or_group, 'or', result) + + return result if result else None + + + @staticmethod + def 
evaluate_expression(issue, variables, expression): + left, op, right = expression + if isinstance(left, tuple): + left = Util.evaluate_expression(issue, variables, left) + + if isinstance(left, basestring): + if left[0] == '$': + left = variables.get(left[1:], left) + else: + val = Util.get_issue_field_value(issue, left) + if val is None: + val = Util.get_issue_field_value(issue, left + 's') + left = val + + if isinstance(right, tuple): + right = Util.evaluate_expression(issue, variables, right) + + if isinstance(right, basestring): + if right[0] == '$': + right = variables.get(right[1:], right) + #else: + #right = Util.get_issue_field_value(issue, right) + + if op == u'=' or op == u'==': + if type(left) in (list,tuple): + for item in left: + val = Util.get_nested_value(item, 'name') + if val and val == right: + return True + return False + else: + return left == right + elif op == u'!=': + return left != right + elif op == u'~' or op == u'==': + return left.find(right) != -1 + elif op == u'!~': + return left.find(right) == -1 + elif op == u'<': + return left < right + elif op == u'<=' or op == u'=<': + return left <= right + elif op == u'>': + return left < right + elif op == u'>=' or op == u'=>': + return left <= right + elif op == u'in': + return left in right + elif op == u'not in': + return left not in right + elif op == u'or': + return left or right + elif op == u'and': + return left and right + + + @staticmethod + def issue_matches_filter(issue, variables, filter): + if not filter: + return True + if not variables: + variables = {} - def parse_config_v2(self, file_name, config_text): - """Parses config and populates __options""" - parsed = {} + filter = Util.expand_filter_variables(filter, variables) + + return Util.evaluate_expression(issue, variables, filter) + + + @staticmethod + def parse_jira_date(string): try: - exec(config_text, globals(), parsed) - for option, value in parsed.iteritems(): - if option == '__options': - pr('Configuration file \'%s\' is 
trying to '\ - 'redefine the built-in variable %s - ignored' % \ - (file_name, option)) - continue - object.__getattribute__( - self, '__options')[option] = value + return datetime.datetime.strptime(string[:19], '%Y-%m-%dT%H:%M:%S') + except Exception: + return string - except Exception, e: - pr(u'Error parsing configuration file \'%s\': %s' % \ - (file_name, unicode(e))) - return 1 - return 0 - def generate_file(self, name=None): - """Creates a default configuration file""" + @staticmethod + def expand_filter_variables(filter, variables): + if not filter: + return filter + + lhs, op, rhs = filter + if type(lhs) == tuple: + lhs = Util.expand_filter_variables(lhs, variables) + else: + lhs = Util.expand_variables(lhs, variables) + + if type(rhs) == tuple: + rhs = Util.expand_filter_variables(rhs, variables) + else: + rhs = Util.expand_variables(rhs, variables) + + return (lhs, op, rhs) + + + @staticmethod + def expand_variables(value, variables): + if type(value) == list: + return [Util.expand_variables(item, variables) \ + for item in value] + + start = 0 + result = u'' + string = value + nchars = len(string) + while True: + pos = string.find(u'$', start) + if pos == -1: + result += string[start:] + break + result += string[start:pos] + pos += 1 + name = u'' + while pos < nchars and string[pos].isalnum(): + name += string[pos] + pos += 1 + value = variables.get(name) + result += str(value) + start = pos + return result + + + @staticmethod + def edit_file(editor, file_name): try: - file_name = name if name else DEFAULT_CONFIG_FILE - file_name = expanduser(expandvars(file_name)) - f = open(file_name, 'w') - f.write(Configuration.CONFIG_FILE_SIGNATURE + \ - Configuration.version + '\n\n') - keys = sorted(Configuration.option_definitions.keys()) - for key in keys: - value, secret, autogenerate, description = \ - Configuration.option_definitions[key] - if not autogenerate: - continue - for line in description.splitlines(): - f.write('# %s\n' % line) - if type(value) in 
(str, unicode): - value = '"' + value + '"' - f.write('%s = %s\n\n' % (key, value)) - f.close() - except IOError, e: - pr(unicode(e)) - exit(2) + subprocess.call([editor, file_name]) + return True + except: + return False + + + @staticmethod + def get_from_editor(cfg, editor, initial_text, file_suffix=u''): + file_prefix = u'jic-' + if file_suffix: + file_prefix += file_suffix + u'-' + try: + f_to_edit = NamedTemporaryFile( + mode='wt', prefix=file_prefix, delete=False) + f_to_edit.write(initial_text.encode('utf-8')) + f_to_edit.close() + except Exception, e: + raise RuntimeError( + u'ERROR: Unable to create a file for editing: ' + + str(e)) + + if not Util.edit_file(editor, f_to_edit.name): + return None - def ask_for_value(self, name, secret, description): - """Prompts the user for a value, caches it afterwards.""" - pr(description) - prompt = name + ': ' try: - if secret: - value = getpass(prompt) + f_edited = open(f_to_edit.name) + new_text = f_edited.read().decode('utf-8') + f_edited.close() + os.remove(f_to_edit.name) + except Exception, e: + raise RuntimeError( + u'ERROR: Unable to edit file \'%s\': %s' % ( + f_to_edit.name, str(e))) + + if new_text == initial_text: + return None + + return new_text + + + @staticmethod + def parse_comment_ids(args): + job = [] + current_issue = None + for chunk in args: + for pair in chunk.split(u','): + head, _, tail = pair.partition(u':') + head = head.strip() + tail = tail.strip() + if tail: + current_issue = head + job.append((current_issue, [tail,])) + else: + if Util.is_issue_key(head): + current_issue = head + job.append((current_issue, [])) + elif current_issue: + job[-1][1].append(head) + else: + pre(u'WARNING: \'%s\' is not an issue key '\ + u'- ignored' % head) + return job + + + @staticmethod + def confirm(prompt, strict=False): + if not sys.stdin.isatty(): + # confirmation requires human input + return False + + prompt = prompt + u' ' + response = '' + while response == '': + try: + response = raw_input(prompt) 
+ except EOFError: + response = 'n' + if strict: + if response == 'YES': + return True + elif response != '': + return False else: - value = raw_input(prompt) - except EOFError: - value = None - object.__getattribute__(self, '__options')[name] = value - return value + if response == '': + continue + if response.lower() == 'y': + return True + else: + return False - def edit_file(self): - """Starts text editor to edit config file, reloads it - afterwards.""" - - if not edit_file(self, - expanduser(expandvars( - object.__getattribute__(self, - '__options')['config']))): - self.read_from_file() +# class Util - def __getattribute__(self, name): - if name in Configuration.internal_names: - return object.__getattribute__(self, name) - value = object.__getattribute__(self, '__options')[name] - if value == ASK_FOR_VALUE: - return self.ask_for_value( - name, - Configuration.option_definitions[name][ - Configuration.SECRET], - Configuration.option_definitions[name][ - Configuration.DESCRIPTION]) - else: - return value - def __setattr__(self, name, value): - if name in Configuration.internal_names: - raise RuntimeError('Configuration is read only') - object.__getattribute__(self, '__options')[name] = value +class TextIterator: + """Helps iterating through text lines""" + def __init__(self, text): + self.lines = text.splitlines() + self.nline_next = 0 + self.nline_max = len(self.lines) - 1 - def __str__(self): - string = '' - keys = sorted(Configuration.option_definitions.keys()) - for key in keys: - string += '* %s = "%s"\n' % ( - key, object.__getattribute__(self, '__options')[key]) - return string - -Configuration.supported_versions = { - '1': Configuration.parse_config_v1, - '2': Configuration.parse_config_v2 -} + def __iter__(self): + return self -# class Configuration + def next(self): + if self.nline_next > self.nline_max: + raise StopIteration + else: + nline = self.nline_next + self.nline_next += 1 + return self.lines[nline] -def ctrl_c_handler(signum, frame): - 
pr('\nInterrupted by user.') - exit(1) -status_code_messages = { - 400: u'Bad Request', - 401: u'Unauthorized', - 403: u'Forbidden', - 404: u'Not Found', - 500: u'Internal Server Error' -} + def previous(self): + if self.nline_next == 0: + raise StopIteration + else: + self.nline_next -= 1 + return self.lines[self.nline_next] -def error_from_status_code(status_code): - return status_code_messages.get(status_code, u'Unknown') +# class TextIterator -def error_message(exception): - if exception is None \ - or 'status_code' not in exception.__dict__: - return u'Unknown error' - return u'%d: %s' % \ - (exception.status_code, - error_from_status_code(exception.status_code)) +class FileTextIterator: -def confirm(prompt, strict=False): - prompt = prompt + u' ' - response = '' - while response == '': - response = raw_input(prompt) - if strict: - if response == 'YES': - return True - elif response != '': - return False - else: - if response == '': - continue - if response.lower() == 'y': - return True + # indexes of self.cache data bits + IDX_FROM = 0 + IDX_TO = 1 + IDX_NLINES = 2 + IDX_LINES = 3 + + """Helps iterating through text lines of a file""" + def __init__(self, file, cache_size=1): + self.file = file + self.previous_line = None + self.last_line = None + self.owns_file = False + if isinstance(file, basestring): + self.file = open(self.file, 'rt') + self.owns_file = True + + + def __iter__(self): + return self + + + def next(self): + if self.previous_line is not None: + self.last_line = self.previous_line + self.previous_line = None + return self.last_line + self.last_line = self.file.readline().decode('utf-8') + #self.last_line = self.read_line() + if self.last_line == u'': + self.last_line = None + raise StopIteration + return self.last_line + + + def previous(self): + if self.previous_line is not None: + raise StopIteration + self.previous_line = self.last_line + return self.last_line + + + def read_line(self): + line = '' + while True: + ch = self.file.read(1) + 
pre(u'Char: %s (%d)' % (ch, ord(ch) if ch else -1)) + if ch in ('', '\004', '\n'): + return None if not line else line else: - return False + line += ch + + +# class TextIterator + -def pre(string=u'', end=u'\n'): - print(string.encode('utf-8'), file=stderr, end=end) +class JQL (object): -def pr(string=u'', end=u'\n'): - print(string.encode('utf-8'), file=stdout, end=end) + @staticmethod + def emit_order_by(fields): + if not fields: + return u'' -re_issue_key = re.compile(r'''\w+-\d+''') + result = [] + for field in fields: + field = field.strip() + if field[-1] == u'-': + result.append(u'%s desc' % field[:-1]) + elif field[-1] == u'+': + result.append(u'%s asc' % field[:-1]) + else: + result.append(u'%s asc' % field) + + return u'order by ' + u', '.join(result) -def is_issue_id(id): - return re_issue_key.match(id) is not None -re_comment_key = re.compile(r'''\w+-\d+:\d+''') + @staticmethod + def emit_filter(filter, variables): + left, op, right = filter -def is_comment_key(key): - return re_comment_key.match(key) is not None + if isinstance(left, tuple): + left = JQL.emit_filter(left, variables) -def get_issue_type(issue): - if issue.fields.issuetype.name == u'Roadmap Card': - if issue.fields.summary.lower().startswith(u'epic:'): - return u'RME' + if isinstance(right, tuple): + right = JQL.emit_filter(right, variables) + elif isinstance(right, list): + right = '(' + (','.join([('"%s"' % item) \ + for item in right])) + ')' else: - return u'RMC' - elif issue.fields.issuetype.name == u'Engineering card': - return u'EC' - elif issue.fields.issuetype.name == u'Blueprint': - return u'EBP' - elif issue.fields.issuetype.name == u'Sub-task': - return u'EST' - elif issue.fields.issuetype.name == u'New Feature': - return u'EFE' - elif issue.fields.issuetype.name == u'Bug': - return u'BUG' + if right[0] == '$': + right = variables.get(right[1:], left) + right = '"%s"' % right - return issue.fields.issuetype.name + if op in ('and', 'or'): + wrap = True + else: + wrap = False 
-def parse_date(string): - return datetime.datetime.strptime(string[:19], '%Y-%m-%dT%H:%M:%S') + return '%s%s %s %s%s' % ( + '( ' if wrap else '', + left, op, right, + ' )' if wrap else '') -def format_person(person): - if person is None: - return u'' - else: - return u'%s <%s>' % (person.displayName, person.emailAddress) -def format_diff(value_from, value_to, what): - if value_from is None: - value_from = '' - if value_to is None: - value_to = '' - - f_from = NamedTemporaryFile( - mode='wt', prefix='jic-', delete=False) - f_from.write(value_from.encode('utf-8')) - f_from.close() - - f_to = NamedTemporaryFile( - mode='wt', prefix='jic-', delete=False) - f_to.write(value_to.encode('utf-8')) - f_to.close() - - args = ('diff', '-u', f_from.name, f_to.name) - proc = subprocess.Popen(args, stdout=subprocess.PIPE) - - raw_diff, stderr = proc.communicate() - - remove(f_to.name) - remove(f_from.name) - - diff = u'' - for line in raw_diff.splitlines(): - line = line.decode('utf-8') - if line.startswith(u'\\ No newline') \ - or line.startswith(u'--- ') \ - or line.startswith(u'+++ '): - continue - if line.startswith(u'@@'): - diff += line + u' ' + what + u'\n' + @staticmethod + def parse_jql_statement(jql): + # TODO: implement + return None, None + + + @staticmethod + def replace_variable_with_JQL(name): + if name == '$me': + return 'currentUser()' + return name + +# class JQL + + +class Namespace (object): + + def __init__(self, dictionary=None, mapping=None): + object.__setattr__(self, '_values', {}) + object.__setattr__(self, '_learn', True) + if dictionary: + self.update_from(dictionary, mapping) + + + def cleanup(self): + vals = object.__getattribute__(self, '__dict__').get('_values') + for name in vals.keys(): + value = vals.get(name) + if type(value) == Namespace: + if value.cleanup(): + del vals[name] + + return len(vals) == 0 + + + def learn(self, flag): + current = object.__getattribute__(self, '_learn') + if not flag: + self.cleanup() + for name, val in 
self.iteritems(): + if type(val) == Namespace: + val.learn(flag) + object.__setattr__(self, '_learn', flag) + + + def get(self, name, default=None): + #print('get_value(%s)' % name, end='') + internal = object.__getattribute__(self, '__dict__') + if name.startswith('_') or name in internal: + return object.__getattribute__(self, name) + + current = internal.get('_values') + path = name + while path: + name, _, path = path.partition('.') + if name in current: + if path: + current = current[name] + continue + #print(' => %s' % current[name]) + return current[name] + break + + #print(' => None') + return default + + + def set(self, name, value): + #print('set(%s, %s)' % (name,value)) + if not name: + return + internal = object.__getattribute__(self, '__dict__') + if name.startswith('__') or name in internal: + return object.__setattr__(self, name, value) + + values = object.__getattribute__(self, '_values') + current = values + previous = current + for subname in name.split('.'): + if subname not in current \ + or type(current[subname]) != Namespace: + current[subname] = Namespace() + previous = current + current = current[subname] + + if type(value) == dict: + val = Namespace(value) + previous[subname] = value + return + #return object.__setattr__(self, name, value) + + + def update_from(self, obj, mapping=None): + """Update values in the namespace hierarchically using the + values from `obj` and translating their names using `mapping` if + present.""" + if not obj: + return + if type(obj) == dict: + iterator = obj.iteritems() + elif type(obj) == argparse.Namespace: + iterator = ( + (key, val) for key, val in obj._get_kwargs() \ + if not key.startswith('_')) + elif type(obj) == type(self): + iterator = obj.itertree() else: - diff += line + u'\n' + raise TypeError(\ + 'only dict and Namespace types are supported') + for path, val in iterator: + if val is not None: + if mapping: + mapped_name = mapping.get(path) + if mapped_name: + if type(mapped_name) in (list,tuple): 
+ mapped_name, convertor = mapped_name + val = convertor(val) + path = mapped_name + self.set(path, val) - return diff -def edit_file(cfg, file_name): - try: - subprocess.call([cfg.editor, file_name]) - return 0 - except: - pr('Could not edit file \'%s\'' % file_name) - return 1 + def __getattribute__(self, name): + #print('__get_attribute__(%s)' % name, end='') + if name.startswith('_') \ + or name in object.__getattribute__(self, '__dict__') \ + or name in object.__getattribute__(self, '__class__').__dict__: + #print(' => %s' % object.__getattribute__(self, name)) + return object.__getattribute__(self, name) -def get_from_editor(cfg, initial_text): - f_to_edit = NamedTemporaryFile( - mode='wt', prefix='jic-', delete=False) - f_to_edit.write(initial_text.encode('utf-8')) - f_to_edit.close() - if not edit_file(cfg, f_to_edit.name): - f_edited = open(f_to_edit.name) - new_content = f_edited.read() - f_edited.close() - remove(f_to_edit.name) - if new_content != initial_text: - return new_content - return None - -def print_comment(cfg, comment): - created = parse_date(comment.created) - updated = parse_date(comment.updated) - - if updated != created: - pr(u'[%s] On %s, %s wrote and\non %s, %s updated:' % ( - comment.id, - created, format_person(comment.author), - updated, format_person(comment.updateAuthor))) - else: - pr(u'[%s] On %s, %s wrote:' % ( - comment.id, - parse_date(comment.created), - format_person(comment.author))) - for line in comment.body.split('\n'): - pr(u'> %s' % line) - pr() + values = object.__getattribute__(self, '_values') + if name not in values: + if object.__getattribute__(self, '_learn'): + values[name] = Namespace() + else: + return None - return 0 + #print(' => %s' % values.get(name)) + return values.get(name) -# TODO: add "Implements:" field -# TODO: add multiline value formatting -def print_issue(cfg, issue, show_body=True, show_links=True, - show_comments=True, show_history=False): - created = parse_date(issue.fields.created) - updated 
= parse_date(issue.fields.updated) - components = u'' - if issue.fields.components is not None: - for component in issue.fields.components: - if len(components): - components += u', %s' % component.name - else: - components += component.name - - labels = u'' - if issue.fields.labels is not None: - for label in issue.fields.labels: - if len(labels): - labels += u', %s' % label - else: - labels += label - - fixVersions = u'' - for fixVersion in issue.fields.fixVersions: - if len(fixVersions): - fixVersions += u', %s' % fixVersion.name - else: - fixVersions += fixVersion.name - - # TODO: sponsors is using a custom field, make it configurable and - # generic - sponsors = u'' - if 'issue.fields.customfield_10203' in issue.fields.__dict__ \ - and issue.fields.customfield_10203 is not None: - for sponsor in issue.fields.customfield_10203: - if len(sponsors): - sponsors += u', %s' % sponsor.value - else: - sponsors += sponsor.value - - url = cfg.server + '/browse/' + issue.key - - pr( -u'''Issue: %s -Summary: %s -''' % ( - issue.key, - issue.fields.summary - ), end=u'') - if show_body: - pr( -u'''Type: %s -FixVersions: %s -Priority: %s -Status: %s -Resolution: %s -Date: %s UTC / %s UTC -From: %s -To: %s -Project: %s -Components: %s -Labels: %s -Sponsors: %s -URL: %s''' % ( - issue.fields.issuetype.name \ - if issue.fields.issuetype is not None else u'N/A', - fixVersions, - issue.fields.priority.name \ - if issue.fields.priority is not None else u'N/A', - issue.fields.status.name \ - if issue.fields.status is not None else u'N/A', - issue.fields.resolution.name \ - if issue.fields.resolution is not None else u'N/A', - updated, created, - format_person(issue.fields.reporter), - format_person(issue.fields.assignee), - issue.fields.project.name \ - if issue.fields.project is not None else u'N/A', - components, - labels, - sponsors, - url - ), end=u'') - pr() - if issue.fields.description: - pr(u'\n%s\n' % - issue.fields.description.strip()) - else: - pr(u'\n\n') - - if 
show_links: - pr(u'======== Links ========\n') - print_issue_links(issue, True, True, u' ') - pr() + def __setattr__(self, name, value): + #print('__setattr__(%s, %s)' % (name,value)) + internal = object.__getattribute__(self, '__dict__') + if name.startswith('_') \ + or name in internal: + return object.__setattr__(self, name, value) + #vals = object.__getattribute__(self, '_values') + vals = internal.get('_values') + vals[name] = value - if show_comments: - pr(u'======== Comments ========\n') - for comment in jira.comments(issue): - print_comment(cfg, comment) - - if show_history: - pr('======== Change History ========\n') - for history in issue.changelog.histories: - when = parse_date(history.created) - who = format_person(history.author) - pr(u'On %s, %s changed:' % (when, who)) - for change in history.items: - what = change.field - from_value = change.fromString \ - if change.fromString is not None \ - else change.__dict__['from'] - to_value = change.toString \ - if change.toString is not None \ - else change.to - pr(format_diff(from_value, to_value, what)) + def __getitem__(self, key): + #print('__getitem__(%s) => %s' % ( + #key, object.__getattribute__(self, '_values').get(key)), end='') + return object.__getattribute__(self, '_values').get(key) + + + def __setitem__(self, key, value): + #print('__setitem__(%s, %s)' % (key,value)) + return self.set(key, value) + + + def __delitem__(self, key): + #print('__delitem__(%s)' % key, end='') + del object.__getattribute__(self, '_values')[key] + + + def __contains__(self, path): + name, _, rest = path.partition('.') + vals = object.__getattribute__(self, '_values') + if name not in vals: + return False + option = vals.get(name) + if rest: + return rest in option + return True + + + def __len__(self): + return len(object.__getattribute__(self, '_values')) + + + def __iter__(self): + return sorted( + object.__getattribute__(self, '_values').__iter__()) + + + def iterkeys(self): + return sorted( + 
object.__getattribute__(self, '_values').iterkeys()) + + + def iteritems(self): + return sorted( + object.__getattribute__(self, '_values').iteritems()) + + + def itertree(self): + """iterate tree leafes - depth first approach""" + + class TreeDepthFirstIterator (object): + + def __init__(self, namespace): + self.queue = [('',namespace),] + + def __iter__(self): + return self + + def next(self): + current_path = '' + while self.queue: + path, item = self.queue.pop(0) + if type(item) == Namespace: + for name, value in reversed(item.iteritems()): + if path: + self.queue.insert( + 0, ('%s.%s' % (path, name), value)) + else: + self.queue.insert( + 0, ('%s' % name, value)) + else: + return path, item + + raise StopIteration() + + return TreeDepthFirstIterator(self) + + + def __str__(self): + res = u'' + for path, value in self.itertree(): + res += u'%s: %s\n' % (path, value) + return res + +# class Namespace + + +class Configuration (object): + + # previous jic versions' configuration file formats + old_version_parsers = {} # initialized after the class definition + CONFIG_FILE_NAME = 'config' + CONFIG_FILE_SIGNATURE = '# JIC Configuration File Version ' + OLD_CONFIGURATION_FILE = '~/.jicrc' + + # jic configuration file format version + version = 3 + + # mapping of old configuration option names to new ones + map_old_options_to_new = {} # initialized after the class definition + + defaults = { + # FS location for configuration file and other data + 'home.location': '~/.jic/', + + # FS mode to be used for home FS location + 'home.mode': 0700, + + # default command line mode; options are: + # 'porcelain' - for high-level commands + # 'plumbing' - for low-level commands + 'cl.mode': 'plumbing', + + # caching mode; options are: + # 'online' - always query from the server, but keep cache + # up to date + # 'cached' - query from the cache if not stale, query from + # the server otherwise + # 'offline' - always query from the cache + 'cache.mode': 'online', + + # cache TTL in 
seconds + 'cache.ttl': 3600, + + # location of the cache's FS storage; use '+' prefix for paths + # relative to 'home.location' + 'cache.location': '+cache', + + # should searches be done using server in `cached` mode + # (alternative is to search in local cache only) + 'query.search_online': False, + + # default message verbosity + 'display.verbosity': VERBOSITY_WARNINGS, + } + + new_file_defaults = { + # porcelain mode command definitions + 'commands.jadd.aliases': 'add,ad,a', + 'commands.jadd.help': 'add issue comments', + 'commands.jadd.plumbing': 'comments add', + + 'commands.jedc.aliases': 'editc,edic,edc,ec', + 'commands.jedc.help': 'edit issue comments', + 'commands.jedc.plumbing': 'comments edit', + + 'commands.jdel.aliases': 'delete,delet,dele,del,de,d', + 'commands.jdel.help': 'delete issue comments', + 'commands.jdel.plumbing': 'comments delete', + + 'commands.jsh.aliases': 'show,sho,sh,s', + 'commands.jsh.help': 'show essential issue information', + 'commands.jsh.plumbing': 'issue show -p fields', + + 'commands.jsh.sc.comments.aliases': + 'comment,commen,comme,comm,com,cmts,cmt,co,c', + 'commands.jsh.sc.comments.help': 'show issue comments', + 'commands.jsh.sc.comments.plumbing': 'issues show -p comments', + + 'commands.jsh.sc.fields.aliases': 'field,fiel,fie,fld,fi,fl,f', + 'commands.jsh.sc.fields.help': 'show issue fields', + 'commands.jsh.sc.fields.plumbing': 'issues show -p fields', + + 'commands.jsh.sc.history.aliases': 'histor,histo,hist,his,hi,h', + 'commands.jsh.sc.history.help': 'show issue change history', + 'commands.jsh.sc.history.plumbing': 'issues show -p history', + + 'commands.jsh.sc.links.aliases': 'link,lin,lnk,li,ln,l', + 'commands.jsh.sc.links.help': 'show issue links', + 'commands.jsh.sc.links.plumbing': 'issues show -p links', + + 'commands.jsh.sc.all.aliases': 'al,a', + 'commands.jsh.sc.all.help': 'show all information for the issue', + 'commands.jsh.sc.all.plumbing': 'issues show -p all', + + 'commands.jls.aliases': 
'list,lis,li,ls,l', + 'commands.jls.help': 'list issues reported by or assigned to you', + 'commands.jls.plumbing': 'issues list -f assignee=$me -f reporter=$me', + + 'commands.jls.sc.assigned.aliases': 'assigne,assign,assig,assi,ass,as,a', + 'commands.jls.sc.assigned.help': 'list issues assigned to you', + 'commands.jls.sc.assigned.plumbing': 'issues list -f assignee=$me', + + 'commands.jls.sc.reported.aliases': 'reporte,report,repor,repo,rep,re,r', + 'commands.jls.sc.reported.help': 'list issues reported by you', + 'commands.jls.sc.reported.plumbing': 'issues list -f reporter=$me', + + 'commands.jed.aliases': 'edit,edi,ed,e', + 'commands.jed.help': 'edit an existing issue', + 'commands.jed.plumbing': 'issues edit -e', + + 'commands.jo.aliases': 'open,ope,op,o', + 'commands.jo.help': 'open issues in a browser', + 'commands.jo.plumbing': 'issues open', + } + + def __init__(self): + # TODO: refactor - try and fix instead of check and try + self.o = self.get_defaults() + + # get home location from env + self.o.home.location = os.environ.get( + 'JIC_O_HOME_LOCATION', self.o.home.location) + self.o.home.mode = Util.to_int(os.environ.get( + 'JIC_O_HOME_MODE', self.o.home.mode)) + self.o.display.verbosity = Util.to_int(os.environ.get( + 'JIC_O_DISPLAY_VERBOSITY', self.o.display.verbosity)) + vset(self.o.display.verbosity) + + home = os.path.expanduser( + os.path.expandvars(self.o.home.location)) + + home_exists, home_accessible = Util.ensure_dir_access( + home, os.R_OK | os.W_OK | os.X_OK, 0700) + + if home_exists != True: + raise home_exists + + if not home_accessible: + raise RuntimeError( + 'Home directory \'%s\' should have \'rwx\' mode' %\ + self.o.home) + + self.config_file = home + os.sep + Configuration.CONFIG_FILE_NAME + if not os.access(self.config_file, os.F_OK): + options = self.get_defaults() + old_options = self.parse_old_config_file() + if old_options: + vpre(VERBOSITY_INFO, u'Imported old version\'s options') + options.update_from(old_options) + 
new_file_options = Namespace(Configuration.new_file_defaults) + options.update_from(new_file_options) + vpre(VERBOSITY_INFO, u'Creating new config file: %s' %\ + self.config_file) + self.create_new_file(options, self.config_file) + + if sys.stdin.isatty() and sys.stdout.isatty(): + editor = self.o.get('editor', 'sensible-editor') + vpre(VERBOSITY_INFO, + u'Editing new config file \'%s\' in %s' %\ + (self.config_file, editor)) + Util.edit_file(editor, self.config_file) + + vpre(VERBOSITY_INFO, u'Reloading new config file \'%s\'' %\ + self.config_file) + self.o.update_from(self.load_file(self.config_file)) + vset(self.o.display.verbosity) + self.update_from_environment() + + + def freeze(self, freeze=True): + self.o.learn(not freeze) + + + def get_defaults(self): + defaults = Namespace(Configuration.defaults) + defaults.set('symlink.location', Util.get_jic_location()) + return defaults + + + def init_home(self): + home_location = self.o.get('home.location') + if home_location is None: + return False, 'homeless configuration' + home_mode = self.o.get('home.mode') + if home_mode is None: + home_mode = 0700 + home_location = os.path.expanduser( + os.path.expandvars(home_location)) + try: + os.mkdir(home_location, home_mode) + except OSerror, e: + if e.errno != errno.EEXISTS: + return False, e.message + + + def create_new_file(self, options, config_file): + try: + if not options: + options = self.o + if not config_file: + home = os.path.expanduser( + os.path.expandvars(self.o.home.location)) + config_file = home + Configuration.CONFIG_FILE_NAME + + f = open(config_file, 'w') + f.write(Configuration.CONFIG_FILE_SIGNATURE +\ + str(self.version) + '\n\n') + for path, value in options.itertree(): + f.write(u'o.' 
+ path + u' = ' + repr(value) + u'\n') + f.close() + except IOError, e: + # TODO: report error + raise + + + def update_file(self, options): + to_update = {} + + if type(options) == Namespace: + for path, value in options.itertree(): + to_update[path] = value + else: + to_update = options + + try: + home = os.path.expanduser( + os.path.expandvars(self.o.home.location)) + config_file_name = home + Configuration.CONFIG_FILE_NAME + + replacing = False + fi = open(config_file_name, 'r') + fo = open(config_file_name + '.new', 'w') + for line in fi: + if not line.strip() and not replacing: + fo.write(u'\n') + continue + + ch = line[0] + + if ch == u'#' and not replacing: + fo.write(line) + continue + + if not ch.isspace(): + if replacing: + replacing = False + + name, _, value = line.partition(u'=') + if _ != u'=': + fo.write(line) + continue + + name = name.strip() + if not name.startswith(u'o.'): + fo.write(line) + continue + + name = name[2:] + if name in to_update: + value = to_update.get(name) + if isinstance(value, basestring) \ + and value.find('\n') >= 0: + first = True + lines = value.splitlines(True) + max_idx = len(lines) - 1 + for idx, line in enumerate(lines): + if first: + fo.write(u'o.%s = \\\n' % name) + first = False + line = line.replace('\t','\\t').\ + replace('\n','\\n') + fo.write(u' u\'%s\'%s\n' % ( + line, ('\\' if idx != max_idx else ''))) + elif value is not None: + fo.write(u'o.%s = %s\n' % (name, repr(value))) + del to_update[name] + replacing = True + else: + fo.write(line) + + else: + if not replacing: + fo.write(line) + + fo.write(u'\n') + + keys = sorted(to_update.keys()) + for name in keys: + value = to_update.get(name) + if isinstance(value, basestring) \ + and value.find('\n') >= 0: + first = True + lines = value.splitlines(True) + max_idx = len(lines) - 1 + for idx, line in enumerate(lines): + if first: + fo.write(u'o.%s = \\\n' % name) + first = False + line = line.replace('\t','\\t').\ + replace('\n','\\n') + fo.write(u' \'%s\'%s\n' % 
( + line, ('\\' if idx != max_idx else ''))) + fo.write + else: + fo.write(u'o.%s = %s\n' % (name, repr(value))) + + fo.write(u'\n') + + fo.close() + fi.close() + os.rename(config_file_name, config_file_name + '~') + os.rename(config_file_name + '.new', config_file_name) + except IOError, e: + # TODO: report error + raise + + + def parse_config_file(self): + return self.load_file() + + + def parse_old_config_file(self): + """Reads configuration from a v2 configuration file.""" + try: + version = None + file_name = Configuration.OLD_CONFIGURATION_FILE + file_name = os.path.expanduser( + os.path.expandvars(file_name)) + f = open(file_name, 'r') + config_text = f.read() + f.close() + # check version + if not config_text.startswith( + Configuration.CONFIG_FILE_SIGNATURE): + raise IOError( + 'Configuration file %s has no signature ' + 'string. Aborting.' % file_name) + else: + version = \ + config_text[ \ + len(Configuration.CONFIG_FILE_SIGNATURE):]\ + .split()[0] + if version not in Configuration.version_parsers: + raise IOError( + '%s - config file format has version %s '\ + 'which is not supported.' 
% ( + file_name, version)) + # call appropriate config parser + return Configuration.version_parsers[version]\ + (self, file_name, config_text) + except IOError, e: + # TODO: report error properly + return None + + if opts['oauth_cert'] is not None: + try: + cf = opts['oauth_cert'] + if not os.access(cf, os.R_OK | os.W_OK | os.X_OK): + raise RuntimeError('OAuth certificate file \'%s\' '\ + 'should have at least \'r\' mode' % cf) + f = open(os.path.expanduser( + os.path.expandvars(opts['oauth_cert']))) + key = f.read().strip() + opts['oauth_cert'] = key + except Exception, e: + pre(u'Failed to read private key from %s: %s\n'\ + u'OAuth is not available' % \ + (opts['oauth_cert'], unicode(e))) + sys.exit(1) + + + def parse_old_config_v2(self, file_name, config_text): + """Parses config and populates __options""" + result = Namespace() + parsed = {} + try: + exec(config_text, globals(), parsed) + + if parsed['oauth_cert'] is not None: + try: + f = open(os.path.expanduser( + os.path.expandvars(parsed['oauth_cert']))) + key = f.read().strip() + parsed['oauth_cert'] = key + except Exception, e: + pre(u'Failed to read private key from %s: %s\n'\ + u'OAuth is not available' % \ + (parsed['oauth_cert'], unicode(e))) + sys.exit(1) + + for name, value in parsed.iteritems(): + target = self.map_old_options_to_new.get(name) + if target: + if callable(target): + target(result, value) + else: + result.set(target, value) + + result.set('server', 'default') + + except Exception, e: + pre(u'Error parsing configuration file \'%s\': %s' % \ + (file_name, unicode(e))) + return None + + return result + + + def load_file(self, file_name=None): + if not file_name: + file_name = self.o.home.location + os.sep +\ + Configuration.CONFIG_FILE_NAME + + file_name = os.path.expanduser( + os.path.expandvars(file_name)) + + try: + options = Namespace() + parsed = {} + f = open(file_name, 'r') + config_text = f.read() + f.close() + # check version + if not config_text.startswith( + 
Configuration.CONFIG_FILE_SIGNATURE): + raise IOError( + 'Configuration file %s has no signature ' + 'string. Aborting.' % file_name) + else: + version = Util.to_int(\ + config_text[ \ + len(Configuration.CONFIG_FILE_SIGNATURE):]\ + .split()[0]) + if version != self.version: + raise IOError( + '%s - config file format has version %s '\ + 'which is not supported.' % ( + file_name, version)) + exec(config_text, {'o': options}, parsed) + return options + + except IOError, e: + raise + # TODO: properly handle + except Exception, e: + raise + # TODO: properly handle + + + def update_from_environment(self, env=None): + if env is None: + env = os.environ + for name, value in env.iteritems(): + if name.startswith('JIC_O_'): + option_name = \ + name[6:].strip().replace('_','.').replace('..','_').lower() + self.o[option_name] = value.strip() + + + @staticmethod + def _split_oauth_pair(ns, pair): + """Splits oauth pair into token and secret and stores them in + namespace""" + ns.servers.default.oauth.token,\ + ns.servers.default.oauth.secret = pair.split(':') + + + def __str__(self): + return '%s(%s)' % ( + type(self).__name__, + str(self.o)) + + +Configuration.map_old_options_to_new = { + 'editor': 'editor', + 'browser': 'browser', + 'server': 'servers.default.url', + 'user': 'servers.default.user', + 'password': 'servers.default.password', + 'oauth_pair': Configuration._split_oauth_pair, + 'oauth_cert': 'servers.default.oauth.cert', +} + +Configuration.version_parsers = { + '2': Configuration.parse_old_config_v2, +} + +# class Configuration + +class CommandLine (object): + + UNWRAP_NONE = staticmethod(lambda val: val) + + UNWRAP_SINGLE = staticmethod(lambda val: val[0] if type(val) in (list,tuple) \ + else val) + + UNWRAP_LIST = staticmethod(lambda val: [item[0] if type(item) in (list,tuple) \ + else item \ + for item in val]) + + UNWRAP_LIST_OF_LISTS = staticmethod(Util.unwrap_list_of_lists) + + cl_to_options_map = {} # filled in in the end of class declaration + + def 
__init__(self, cfg): + # TODO: implement + self.cfg = cfg + #self.porcelain_commands = self._build_commands_from_config( + # cfg.o.commands) + self._build_plumbing_commands() + self._build_porcelain_commands() + + command_path, parsed_cl = self._parse_command_line(cfg.o) + + options = Namespace() + direct_options = Namespace(parsed_cl, + CommandLine.cl_to_options_map) + + if direct_options.cl.mode == 'porcelain': + # translate porcelain command into plumbing one + #command_path, parsed_cl = self._porcelain_into_plumbing( + # TODO: remove magic numbers + if (command_path is None or len(command_path) == 0) \ + and len(parsed_cl.args) > 0: + cfg.o.set('cl.unrecognized', parsed_cl.args) + raise RuntimeError(u'Unrecognized command: %s' % \ + u' '.join(parsed_cl.args)) + + aliases, subcommands, (action_type, action), description = \ + command_path[0] + if action_type != 'plumbing': + # TODO: report error: inconsistent command definition + print('Incorrect command definition for \'%s\'' % + aliases[0]) + sys.exit(1) + # TODO: remove magic numbers + args = shlex.split(action) + args.extend(direct_options.query.args) + direct_options.cl.args = None + command_path, parsed_plumbing_cl = \ + self._parse_modal_command_line(self.plumbing_commands, + args) + options = Namespace(parsed_plumbing_cl, + CommandLine.cl_to_options_map) + + options.update_from(direct_options) + + cfg.o.update_from(options) + + self.command_path = command_path + self.command = command_path[0] if command_path else None + + # normalize configuration values + if cfg.o.get('query.process_all', False): + cfg.o.set('query.parts', ('all',)) + else: + normalized = [] + for part in cfg.o.get('query.parts', ()): + part = part.strip() + if part in ('all', 'al', 'a'): + normalized.append('all') + break + elif part in ('fields', 'field', 'fiel', 'fie', 'fld', + 'fi', 'fl', 'f'): + normalized.append('fields') + elif part in ('comments', 'comment', 'commen', 'comme', + 'comm', 'com', 'cmts', 'cmt', 'co', 'c'): + 
normalized.append('comments') + elif part in ('history', 'histor', 'histo', 'hist', + 'his', 'hi', 'h'): + normalized.append('history') + elif part in ('links', 'link', 'lin', 'lnk', 'li', 'ln', + 'l'): + normalized.append('links') + elif part in ('worklog', 'worklo', 'workl', 'work', + 'wor', 'wo', 'w'): + normalized.append('worklog') + else: + pre(u'Unrecognized part specified: '\ + u'\'%s\'. Ignored.' % part) + if normalized: + cfg.o.set('query.parts', normalized) + + filter = Util.parse_filter_criteria(cfg.o.get('query.filter')) + cfg.o.set('query.filter', filter) + + # set verbosity + vset(cfg.o.get('display.verbosity', vget())) + + # get values requested from the stdin + + if cfg.o.get('query.get_keys', False): + # read keys from the stdin until an empty line or an EOF + new_keys = [] + while True: + line = sys.stdin.readline() + line = line.strip() + if not line: + break + for chunk in line.split(','): + for key in chunk.split(' '): + if not key: + continue + if not Util.is_issue_key(key) \ + and not Util.is_comment_id(key): + raise RuntimeError( + u'Invalid issue key or comment id provided: \'%s\'' % key) + new_keys.append(key) + + keys = cfg.o.get('query.args') + if type(keys) != list: + keys = new_keys + else: + keys.extend(new_keys) + cfg.o.set('query.args', keys) + + if cfg.o.get('query.from_stdin', False): + stdin_query = sys.stdin.readline().strip() + if stdin_query: + queries = cfg.o.get('query.jql', []) + queries.append(stdin_query) + cfg.o.set('query.jql', queries) + + + def execute(self, cfg): + # TODO: implement + if not self.command: + return + + aliases, subcommands, action_pair, description = self.command + + if not action_pair: + raise RuntimeError( + u'Please specify subcommand for \'%s\'' % aliases[0]) + + action_type, action = action_pair + + if action_type == 'python': + return action(self.cfg) + else: + RuntimeError( + u'Don\'t know how to perform the command %s (%s)' % \ + (aliases[0], action_type)) + + + def _parse_command_line(self, 
cfg=None, args=None): + """Parses command line taking into account its mode. + + Returns: (subject, command, namespace) + """ + + if not cfg: + cfg = self.cfg + + prog_name = os.path.basename(sys.argv[0]) + if not prog_name or not prog_name.strip(): + return None + + mode = None + if prog_name != JIC_PROGRAM_NAME: + mode = 'porcelain' + + root_parser = self._build_root_parser() + parsed_cl, unparsed_args = root_parser.parse_known_args(args) + if parsed_cl.help: + if parsed_cl.mode: + mode = parsed_cl.mode + if mode is None: + root_parser.print_help() + sys.exit(0) + else: + if mode == 'porcelain': + self._build_modal_parser(self.porcelain_commands).print_help() + elif mode == 'plumbing': + self._build_modal_parser(self.plumbing_commands).print_help() + sys.exit(0) + + if parsed_cl.mode is None: + if mode is None: + mode = cfg.get('cl.mode') + else: + mode = parsed_cl.mode + + args = unparsed_args + + if prog_name != JIC_PROGRAM_NAME: + args.insert(0, prog_name) + + commands = None + if mode == 'porcelain': + commands = self.porcelain_commands + elif mode == 'plumbing': + commands = self.plumbing_commands + + command_path, parsed_cl = \ + self._parse_modal_command_line(commands, args) + + parsed_cl.mode = mode + + return (command_path, parsed_cl) + + + def _parse_modal_command_line(self, command_defs, args): + + modal_parser = self._build_modal_parser(command_defs) + parsed_cl, unparsed_args = modal_parser.parse_known_args(args) + if parsed_cl.help: + modal_parser.print_help() + sys.exit(0) + + consumed_args = 0 + + consumed_args, command_path = \ + self._lookup_command(command_defs, unparsed_args) + + del unparsed_args[:consumed_args] + + parsed_cl.args = unparsed_args + + return (command_path, parsed_cl) + + + def _lookup_command(self, command_defs, args): + """Returns (no_of_consumed_args, command_path)""" + if args is None or len(args) < 1: + return (0, None) + next_arg = 0 + nargs = len(args) + consumed_args = 0 + + command_path = [] + current_level_commands = 
command_defs + + while current_level_commands: + next_level = False + for command in current_level_commands: + aliases, commands, function, description = command + for alias in aliases: + if alias == args[next_arg]: + command_path.insert(0, command) + next_arg += 1 + consumed_args += 1 + next_level = True + if next_arg >= nargs: + return (consumed_args, command_path) + if type(commands) in (list,tuple): + current_level_commands = commands + break + else: + current_level_commands = None + break + if next_level: + break + if not next_level: + break + + return (consumed_args, command_path) + + def _build_plumbing_commands(self): + self.plumbing_commands = ( + (('comments', 'comment', 'commen', 'comme', 'comm', 'com', + 'co', 'c'), + ( + (('add', 'ad', 'a'), + None, + ('python', cmd_comments_add), + 'add new comments'), + (('delete', 'delet', 'dele', 'del', 'de', 'd'), + None, + ('python', cmd_comments_delete), + 'delete existing comments'), + (('edit', 'edi', 'ed', 'e'), + None, + ('python', cmd_comments_edit), + 'edit existing comments'), + (('list', 'lis', 'li', 'ls', 'l'), + None, + ('python', cmd_comments_list), + 'list comments'), + #(('reply', 'repl', 'rep', 're', 'r'), + #None, + #('python', cmd_comments_reply), + #'reply to existing comments'), + (('show', 'sho', 'sh', 's'), + None, + ('python', cmd_comments_show), + 'show comments'), + ), + None, + 'issue comment related commands' + ), + (('commands', 'command', 'comman', 'comma', 'comm', 'com', + 'co', 'cm'), + ( + #(('add', 'ad', 'a'), + #None, + #('python', lambda cfg, cache, tpl: print('commands:add')), + #'add a command for the porcelain mode'), + #(('delete', 'delet', 'dele', 'del', 'de', 'd'), + #None, + #('python', lambda cfg, cache, tpl: print('commands:delete')), + #'delete a porcelain mode command'), + #(('edit', 'edi', 'ed', 'e'), + #None, + #('python', lambda cfg, cache, tpl: print('commands:edit')), + #'edit a porcelain mode command'), + #(('list', 'lis', 'li', 'ls', 'l'), + #None, + #('python', 
lambda cfg, cache, tpl: print('commands:list')), + #'list porcelain mode commands'), + #(('show', 'sho', 'sh', 's'), + #None, + #('python', lambda cfg, cache, tpl: print('commands:show')), + #'show a porcelain mode command'), + (('symlink', 'symlin', 'symli', 'syml', 'sym', 'sy', + 'sl', 'ln'), + None, + ('python', cmd_commands_symlink), + 'create porcelain mode symlinks for commands defined'), + ), + None, + 'operations with porcelain mode commands' + ), + (('configuration', 'configuratio', 'configurati', + 'configurat', 'configura', 'configur', 'configu', + 'config', 'confi', 'conf', 'con', 'cfg', 'cf'), + ( + (('edit', 'edi', 'ed', 'e'), + None, + ('python', cmd_configuration_edit), + 'edit configuration file'), + #(('list', 'lis', 'li', 'ls', 'l'), + #None, + #('python', lambda cfg, cache, tpl: print('configuration:list')), + #'list options and their values'), + #(('set', 'se', 's'), + #None, + #('python', lambda cfg, cache, tpl: print('configuration:set')), + #'set options\' values'), + #(('show', 'sho', 'sh'), + #None, + #('python', lambda cfg, cache, tpl: print('configuration:show')), + #'show options'), + #(('unset', 'unse', 'uns', 'un', 'u'), + #None, + #('python', lambda cfg, cache, tpl: print('configuration:unset')), + #'unset options'), + ), + None, + 'configuration related commands' + ), + (('issues', 'issue', 'issu', 'iss', 'is', 'i'), + ( + #(('clone', 'clon', 'clo', 'cl'), + #None, + #('python', lambda cfg, cache, tpl: print('issues:clone')), + #'clone existing issues and edit clones'), + (('create', 'creat', 'crea', 'cre', 'cr', 'c'), + None, + ('python', cmd_issues_create), + 'create new issues'), + #(('delete', 'delet', 'dele', 'del', 'de', 'd'), + #None, + #('python', lambda cfg, cache, tpl: print('issues:delete')), + #'delete existing issues'), + (('edit', 'edi', 'ed', 'e'), + None, + ('python', cmd_issues_edit), + 'edit existing issues'), + (('fetch', 'fetc', 'fet', 'fe', 'f'), + None, + ('python', cmd_issues_fetch), + 'cache issues locally'), 
+ (('fields', 'field', 'fiel', 'fie', 'fi'), + None, + ('python', cmd_issues_fields), + 'show issues\' fields'), + #(('forget', 'forge', 'forg', 'for', 'fo'), + #None, + #('python', cmd_issues_forget), + #'remove issues from local cache'), + (('list', 'lis', 'ls', 'l'), + None, + ('python', cmd_issues_list), + 'list issues'), + #(('link', 'lin', 'ln'), + #None, + #('python', lambda cfg, cache, tpl: print('issues:link')), + #'link issues between each other'), + #(('move', 'mov', 'mo', 'mv', 'm'), + #None, + #('python', lambda cfg, cache, tpl: print('issues:move')), + #'move issues between projects'), + (('open', 'ope', 'op', 'o'), + None, + ('python', cmd_issues_open), + 'open issues in a web browser'), + (('pull', 'pul', 'p'), + None, + ('python', cmd_issues_pull), + 'refresh issues in local cache'), + #(('push', 'pus', 'pu'), + #None, + #('python', lambda cfg, cache, tpl: print('issues:push')), + #'push local changes to the server'), + #(('revert', 'rever', 'reve', 'rev', 're', 'r'), + #None, + #('python', lambda cfg, cache, tpl: print('issues:revert')), + #'revert changes'), + (('show', 'sho', 'sh', 's'), + None, + ('python', cmd_issues_show), + 'show issues in details'), + #(('status', 'statu', 'stat', 'sta', 'st'), + #None, + #('python', lambda cfg, cache, tpl: print('issues:status')), + #'show status of local cache'), + #(('transition', 'transitio', 'transiti', 'transit', + #'transi', 'trans', 'tran', 'tra', 'tr'), + #None, + #('python', lambda cfg, cache, tpl: print('issues:transition')), + #'transition issues between states'), + #(('tree', 'tre', 't'), + #None, + #('python', lambda cfg, cache, tpl: print('issues:tree')), + #'show issue trees'), + #(('unlink', 'unlin', 'unli', 'unl', 'un', 'u'), + #None, + #('python', lambda cfg, cache, tpl: print('issues:unlink')), + #'remove linke between issues'), + ), + None, + 'issue related commands' + ), + #(('link', 'lin', 'lnk'), + #( + #(('create', 'creat', 'crea', 'cre', 'cr', 'c'), + #None, + #('python', lambda 
cfg, cache, tpl: print('link:create')), + #'create links between issues'), + #(('delete', 'delet', 'dele', 'del', 'de', 'd'), + #None, + #('python', lambda cfg, cache, tpl: print('link:delete')), + #'delete links between issues'), + #(('list', 'lis', 'li', 'ls', 'l'), + #None, + #('python', lambda cfg, cache, tpl: print('link:list')), + #'list links'), + #), + #None, + #'isue link related commands' + #), + #(('list', 'lst', 'li'), + #( + #(('add', 'ad', 'a'), + #None, + #('python', lambda cfg, cache, tpl: print('list:add')), + #'add issues into a list of issues'), + #(('create', 'creat', 'crea', 'cre', 'cr', 'c'), + #None, + #('python', lambda cfg, cache, tpl: print('list:create')), + #'create a local list of issues'), + #(('delete', 'delet', 'dele', 'del', 'de', 'd'), + #None, + #('python', lambda cfg, cache, tpl: print('list:delete')), + #'delete an existing list of issues'), + #(('edit', 'edi', 'ed', 'e'), + #None, + #('python', lambda cfg, cache, tpl: print('list:edit')), + #'edit an existing list of issues'), + #(('list', 'lis', 'li', 'ls', 'l'), + #None, + #('python', lambda cfg, cache, tpl: print('list:list')), + #'list issue lists'), + #(('receive', 'receiv', 'recei', 'rece', 'rec', 'rcv'), + #None, + #('python', lambda cfg, cache, tpl: print('list:receive')), + #'receive a list of issues sent by `send` command'), + #(('remove', 'remov', 'remo', 'rem', 're', 'r'), + #None, + #('python', lambda cfg, cache, tpl: print('list:remove')), + #'remove issues from an existing list of issues'), + #(('send', 'sen', 'snd', 'se'), + #None, + #('python', lambda cfg, cache, tpl: print('list:send')), + #'send a list of issues'), + #(('show', 'sho', 'sh', 's'), + #None, + #('python', lambda cfg, cache, tpl: print('list:show')), + #'show an existing issue list'), + #), + #None, + #'issue list related commands' + #), + #(('report', 'repor', 'repo', 'rep'), + #( + #(('create', 'creat', 'crea', 'cre', 'cr', 'c'), + #None, + #('python', lambda cfg, cache, tpl: 
print('report:create')), + #'create a report definition'), + #(('delete', 'delet', 'dele', 'del', 'de', 'd'), + #None, + #('python', lambda cfg, cache, tpl: print('report:delete')), + #'delete a report definition'), + #(('edit', 'edi', 'ed', 'e'), + #None, + #('python', lambda cfg, cache, tpl: print('report:edit')), + #'edit a report definition'), + #(('generate', 'generat', 'genera', 'gener', 'gene', + #'gen', 'ge', 'g'), + #None, + #('python', lambda cfg, cache, tpl: print('report:generate')), + #'generate reports'), + #(('list', 'lis', 'li', 'ls', 'l'), + #None, + #('python', lambda cfg, cache, tpl: print('report:list')), + #'list report definitiona'), + #), + #None, + #'report related commands' + #), + (('servers', 'server', 'serve', 'serv', 'ser', 'srv', 'se'), + ( + (('add', 'ad', 'a'), + None, + ('python', cmd_servers_add), + 'register new JIRA server'), + (('dance', 'danc', 'dan', 'da'), + None, + ('python', cmd_servers_dance), + 'perform OAuth authentication with a JIRA server'), + (('delete', 'delet', 'dele', 'del', 'de', 'd'), + None, + ('python', cmd_servers_delete), + 'delete a JIRA server'), + (('edit', 'edi', 'ed', 'e'), + None, + ('python', cmd_servers_add), + 'edit a JIRA server information'), + (('list', 'lis', 'li', 'ls', 'l'), + None, + ('python', cmd_servers_list), + 'list known JIRA servers'), + (('select', 'selec', 'sele', 'sel', 'se'), + None, + ('python', cmd_servers_select), + 'select a server as a default one'), + (('show', 'sho', 'sh', 's'), + None, + ('python', cmd_servers_show), + 'show a JIRA server\'s information'), + ), + None, + 'server connection related commands' + ), + #(('template', 'templat', 'templa', 'templ', 'temp', 'tem', + #'tpl', 'te'), + #( + #(('create', 'creat', 'crea', 'cre', 'cr', 'c'), + #None, + #('python', lambda cfg, cache, tpl: print('template:create')), + #'create a new template'), + #(('delete', 'delet', 'dele', 'del', 'de', 'd'), + #None, + #('python', lambda cfg, cache, tpl: print('template:delete')), + 
#'delete an existing template'), + #(('edit', 'edi', 'ed', 'e'), + #None, + #('python', lambda cfg, cache, tpl: print('template:edit')), + #'edit an existing template'), + #(('list', 'lis', 'li', 'ls', 'l'), + #None, + #('python', lambda cfg, cache, tpl: print('template:list')), + #'list defined templates'), + #(('show', 'sho', 'sh', 's'), + #None, + #('python', lambda cfg, cache, tpl: print('template:show')), + #'show template details'), + #), + #None, + #'information representation template related commands' + #), + #(('worklog', 'worklo', 'workl', 'work', 'wor', 'wo', 'wl', + #'w'), + #( + #(('add', 'ad', 'a'), + #None, + #('python', lambda cfg, cache, tpl: print('worklog:add')), + #'add records into issue\'s worklog'), + #(('delete', 'delet', 'dele', 'del', 'de', 'd'), + #None, + #('python', lambda cfg, cache, tpl: print('worklog:delete')), + #'delete work log records'), + #(('edit', 'edi', 'ed', 'e'), + #None, + #('python', lambda cfg, cache, tpl: print('worklog:edit')), + #'edit existing work log records'), + #(('list', 'lis', 'li', 'ls', 'l'), + #None, + #('python', lambda cfg, cache, tpl: print('worklog:list')), + #'list existing worklog records'), + #(('show', 'sho', 'sh', 's'), + #None, + #('python', lambda cfg, cache, tpl: print('worklog:show')), + #'show worklog issues'), + #), + #None, + #'worklog related commands' + #), + ) + + + def _build_porcelain_commands(self): + command_defs = self.cfg.o.get('commands') + if command_defs: + self.porcelain_commands = \ + self._build_commands_from_config(command_defs) + else: + self.porcelain_commands = () + + + def _build_commands_from_config(self, cfg_commands): + result = [] + index = {} + if not cfg_commands: + return None + for name, command in cfg_commands.iteritems(): + result.append(self._build_one_command_from_config(name, command)) + return result + + + def _build_one_command_from_config(self, name, cfg_cmd): + res_aliases = [name,] + cfg_aliases = cfg_cmd.get('aliases') + if cfg_aliases: + for alias in 
cfg_aliases.split(','): + res_aliases.append(alias.strip()) + res_help = '' + cfg_help = cfg_cmd.get('help') + if cfg_help: + res_help = cfg_help.strip() + res_function = None + cfg_plumbing = cfg_cmd.get('plumbing') + if cfg_plumbing: + res_function = ('plumbing', cfg_plumbing.strip()) + res_sc = None + cfg_sc = cfg_cmd.get('sc') + if cfg_sc: + res_sc = self._build_commands_from_config(cfg_sc) + return (res_aliases, res_sc, res_function, res_help) + + + def _build_root_parser(self, commands=None): + parser = argparse.ArgumentParser( + prog='jic', + description='jic - JIRA CLI tool (v%s)\n\n'\ + 'Allows dealing with JIRA from within '\ + 'your terminal.' % __version__, + fromfile_prefix_chars='@', + formatter_class=argparse.RawTextHelpFormatter, + add_help=False + ) + + parser.add_argument('-h', '--help', action='store_true', + dest='help', + help='show usage information') + parser.add_argument('--porcelain', action='store_const', + const='porcelain', dest='mode', + help='set porcelain mode of operation') + parser.add_argument('--plumbing', action='store_const', + const='plumbing', dest='mode', + help='set plumbing mode of operation') + + return parser + + + def _build_modal_parser(self, commands=None): + command_help = self._format_command_help( + commands, + 'supported commands are:\n\n') \ + if commands is not None \ + else None + + parser = argparse.ArgumentParser( + prog='jic', + description='jic - JIRA CLI tool (v%s)\n\n'\ + 'Allows dealing with JIRA from within '\ + 'your terminal.' 
% __version__, + epilog=command_help, + fromfile_prefix_chars='@', + formatter_class=argparse.RawTextHelpFormatter, + add_help=False + ) + + parser.add_argument('-a', '--all', action='store_true', + dest='process_all', default=None, + help='process all items/parts') + parser.add_argument('-b', '--order-by', nargs=1, action='append', + dest='order_by', metavar='CRITERIA_LIST', + help='order results according to the criteria') + parser.add_argument('-d', '--down', action='store_true', + dest='down', default=None, + help='process children') + parser.add_argument('-D', '--down-from', nargs=1, action='append', + dest='down_from', metavar="ISSUE_LIST", + help='process children of the parent(s) specified') + parser.add_argument('-e', '--editor', action='store_true', + dest='use_editor', default=None, + help='use editor to provide information to jic') + parser.add_argument('-f', '--filter', nargs=1, action='append', + dest='filter', metavar='CRITERIA_LIST', + help='only process items matching the criteria') + parser.add_argument('-F', '--fields', nargs=1, action='append', + dest='fields', metavar='FIELD_LIST', + help='only process the fields listed') + parser.add_argument('-h', '--help', action='store_true', + dest='help', default=None, + help='show usage information') + parser.add_argument('-H', '--depth', nargs=1, type=int, + dest='depth', default=None, metavar='NUMBER', + help='only process so many levels starting from the referred object') + #parser.add_argument('-i', '--interactive', action='store_true', + #dest='interactive', default=None, + #help='perform actions interactively, asking for confirmation') + #parser.add_argument('-j', '--json', action='store_true', + #dest='use_json', default=None, + #help='use JSON representation of the data') + parser.add_argument('-k', '--keys', action='store_true', + dest='get_keys', default=None, + help='get keys of the objects to be processed') + parser.add_argument('-L', '--link-types', nargs=1, action='append', + 
dest='link_types', metavar='LINK_TYPE_LIST', + help='process/use only the link types specified') + #parser.add_argument('-m', '--message', nargs=1, action='append', + #dest='message', metavar='TEXT', + #help='use this message, don\'t expect it from stdin/editor') + parser.add_argument('-n', '--number-of-items', nargs=1, + dest='number_of_items', metavar='NUMBER_OF_ITEMS', default=None, + help='process only so many items') + parser.add_argument('-o', '--flip_online', action='count', + dest='flip_online', default=None, + help='flip between cached and online modes') + parser.add_argument('--online', action='store_true', + dest='online', default=None, + help='perform actions on the server') + parser.add_argument('--cached', action='store_true', + dest='cached', default=None, + help='perform actions on the server using cache') + parser.add_argument('-O', '--offline', action='store_true', + dest='offline', default=None, + help='perform actions in the local cache only') + parser.add_argument('-p', '--parts', nargs=1, action='append', + dest='parts', metavar='PART_LIST', + help='process only object parts mentioned') + #parser.add_argument('-P', '--purge', nargs=1, action='append', + #dest='items_to_purge', metavar='ITEM_LIST', + #help='process only items mentioned') + parser.add_argument('-q', '--query', nargs=1, action='append', + dest='query', metavar='JQL_query', + help='get the list of issues to process using a JQL query') + parser.add_argument('-Q', '--query-stdin', action='store_true', + dest='query_in_stdin', default=None, + help='get the list of issues to process using a JQL query') + parser.add_argument('-r', '--raw', action='store_true', + dest='output_raw', default=None, + help='output raw data') + #parser.add_argument('-R', '--range', nargs=1, action='append', + #dest='range', metavar='RANGE', + #help='process only items matching the range') + parser.add_argument('-s', '--self', action='store_true', + dest='include_self', default=None, + help='include the issue 
specified (besides children/ancestors)') + parser.add_argument('-S', '--server', nargs=1, + dest='server_name', default=None, metavar='SERVER_NAME', + help='work with JIRA server specified') + parser.add_argument('-t', '--template', nargs=1, + dest='template', default=None, metavar='NAME', + help='use the template specified') + parser.add_argument('-T', '--issue-types', nargs=1, action='append', + dest='issue_types', metavar='ISSUE_TYPE_LIST', + help='process/use only the issue types specified') + parser.add_argument('-u', '--up', action='store_true', + dest='up', default=None, + help='process ancestors') + parser.add_argument('-U', '--up-from', nargs=1, action='append', + dest='up_from', metavar="ISSUE_LIST", + help='process ancestors of the issue(s) specified') + parser.add_argument('-v', '--verbose', action='count', + dest='verbosity', default=None, + help='be verbose while performing actions') + parser.add_argument('-V', '--version', action='version', + version='%(prog)s v' + __version__, + help='show jic\'s version information') + parser.add_argument('-w', '--output_width', nargs=1, + dest='output_width', metavar='OUTPUT_WIDTH', default=None, + help='wrap output as specified') + + return parser + + def _format_command_help(self, commands, title=None, + show_group_titles=False): + res = title \ + if title is not None \ + else '' + res += self._format_command_tree(commands, + show_title=show_group_titles) + return res + + def _format_command_tree(self, command_list, prefix='', + show_title=False): + res = '' + for aliases, commands, function, description in command_list: + if type(commands) in (list,tuple): + res += '%s%s%s\n' % ( + prefix, + aliases[0], + description if show_title else '') + res += self._format_command_tree(commands, + prefix + ' '*4, + show_title) + else: + prefixed_name = prefix + aliases[0] + res += '%-19s %-55s\n' % ( + prefixed_name, + description) + return res + first_cmd = True + for subject in cli_sac: + aliases, commands = subject + 
res += '\n ' + aliases[0] + ' (' + \ + ', '.join(aliases[1:]) + ')\n' + for names, _, description in commands: + res += ' %-19s %-55s\n' % (names[0], description) + res += '\nWhen subject is omitted, `issue` is assumed.\n\n' + \ + 'For more details please see `man jic`.\n' + + +CommandLine.cl_to_options_map = { + 'args': ('query.args', CommandLine.UNWRAP_NONE), + 'depth': ('query.depth', CommandLine.UNWRAP_SINGLE), + 'down': ('query.down', CommandLine.UNWRAP_NONE), + 'down_from': ('query.down_from', CommandLine.UNWRAP_LIST), + 'fields': ('display.fields', CommandLine.UNWRAP_LIST_OF_LISTS), + 'filter': ('query.filter', CommandLine.UNWRAP_LIST), + 'get_keys': ('query.get_keys', CommandLine.UNWRAP_NONE), + 'help': ('help', CommandLine.UNWRAP_NONE), + 'include_self': ('query.include_self', CommandLine.UNWRAP_NONE), + 'interactive': ('cl.interactive', CommandLine.UNWRAP_NONE), + 'issue_types': ('query.issue_types', CommandLine.UNWRAP_LIST_OF_LISTS), + 'items_to_purge': ('query.items_to_purge', CommandLine.UNWRAP_LIST_OF_LISTS), + 'link_types': ('query.link_types', CommandLine.UNWRAP_LIST_OF_LISTS), + 'message': ('query.message', CommandLine.UNWRAP_LIST), + 'mode': ('cl.mode', CommandLine.UNWRAP_NONE), + 'number_of_items': ('query.number_of_items', CommandLine.UNWRAP_SINGLE), + 'flip_online': ('query.flip_online', CommandLine.UNWRAP_NONE), + 'online': ('query.online', CommandLine.UNWRAP_NONE), + 'offline': ('query.offline', CommandLine.UNWRAP_NONE), + 'order_by': ('display.order_by', CommandLine.UNWRAP_LIST_OF_LISTS), + 'output_raw': ('display.raw', CommandLine.UNWRAP_NONE), + 'parts': ('query.parts', CommandLine.UNWRAP_LIST_OF_LISTS), + 'process_all': ('query.process_all', CommandLine.UNWRAP_NONE), + 'query': ('query.jql', CommandLine.UNWRAP_SINGLE), + 'query_in_stdin': ('query.from_stdin', CommandLine.UNWRAP_NONE), + 'range': ('query.range', CommandLine.UNWRAP_LIST_OF_LISTS), + 'server_name': ('server', CommandLine.UNWRAP_SINGLE), + 'template': ('display.template', 
CommandLine.UNWRAP_SINGLE), + 'up': ('query.up', CommandLine.UNWRAP_NONE), + 'up_from': ('query.up_from', CommandLine.UNWRAP_LIST_OF_LISTS), + 'use_editor': ('cl.use_editor', CommandLine.UNWRAP_NONE), + 'verbosity': ('display.verbosity', CommandLine.UNWRAP_NONE), + 'output_width': ('display.output_width', CommandLine.UNWRAP_SINGLE), +} + +# class CommandLine + + +class Cache (object): + + HOME = 'cache' + ISSUE_CACHE = 'issues' + ISSUE_EDITMETA_CACHE = 'editmeta' + ISSUE_CREATEMETA_CACHE = 'createmeta' + WORKLOG_CACHE = 'worklogs' + FIELD_CACHE = 'fields' + + TS_FORMAT = '%Y-%m-%d %H:%M:%S' + + # modes of operation + DEFAULT_MODE = 'cached' + + # always get from the server, keep the cache up to date + MODE_ONLINE = 'online' + + # get from the cache of not stale (ttl wise), get from the server + # before updating, send updates to the server (write through) + MODE_CACHED = 'cached' + + # only use local cache (also for recording updates) + MODE_OFFLINE = 'offline' + + # default time to live for cached items + DEFAULT_TTL = 3600 + + # use online searches in cached mode (istead of cache based one) + DEFAULT_SEARCH_ONLINE = True + + # data source options + SOURCE_CACHE_ONLY = 0 + SOURCE_CACHED_SERVER = 1 + SOURCE_SERVER = 2 + + # level 1 cache (level 2 being the FS storage) + issue_L1 = {} + editmeta_L1 = {} + createmeta_L1 = {} + worklog_L1 = {} + + + def __init__(self, cfg, server_name=None): + self.cfg = cfg + self.srv_name = server_name + + self.srv_name = cfg.o.get('server', self.srv_name) + + if not self.srv_name: + # TODO: report error + raise RuntimeError( + u'No server specified - please either '\ + u'do `jic select server` or use -S switch') + + self.srv_cfg = cfg.o.get('servers.' 
+ self.srv_name) + + if self.srv_cfg is None: + # TODO: report error + raise RuntimeError( + 'Server \'%s\' is not known' % self.srv_name) + + self.jira = None + self.field_by_id = {} + self.field_by_name = {} + + self.home = os.path.expanduser(os.path.expandvars( + cfg.o.home.location + os.sep + Cache.HOME)) + + home_exists, home_accessible = Util.ensure_dir_access( + self.home, os.R_OK | os.W_OK | os.X_OK, 0700) + + if home_exists != True: + raise home_exists + + if not home_accessible: + raise RuntimeError( + 'Cache directory \'%s\' should have \'rwx\' mode' % self.home) + + self.srv_home = self.home + os.sep + self.srv_name + + srv_home_exists, srv_home_accessible = Util.ensure_dir_access( + self.srv_home, os.R_OK | os.W_OK | os.X_OK, 0700) + + if srv_home_exists != True: + raise home_exists + + if not srv_home_accessible: + raise RuntimeError( + 'Cache directory \'%s\' should have \'rwx\' mode' %\ + self.srv_home) + + self.issue_home = self.srv_home + os.sep + Cache.ISSUE_CACHE + + issue_home_exists, issue_home_accessible = Util.ensure_dir_access( + self.issue_home, os.R_OK | os.W_OK | os.X_OK, 0700) + + if issue_home_exists != True: + raise issue_home_exists + + if not issue_home_accessible: + raise RuntimeError( + 'Cache directory \'%s\' should have \'rwx\' mode' %\ + self.issue_home) + + self.editmeta_home = self.srv_home + os.sep \ + + Cache.ISSUE_EDITMETA_CACHE + + editmeta_home_exists, editmeta_home_accessible = \ + Util.ensure_dir_access( + self.editmeta_home, os.R_OK | os.W_OK | os.X_OK, 0700) + + if editmeta_home_exists != True: + raise editmeta_home_exists + + if not editmeta_home_accessible: + raise RuntimeError( + 'Cache directory \'%s\' should have \'rwx\' mode' %\ + self.editmeta_home) + + self.worklog_home = self.srv_home + os.sep + Cache.WORKLOG_CACHE + + worklog_home_exists, worklog_home_accessible = Util.ensure_dir_access( + self.worklog_home, os.R_OK | os.W_OK | os.X_OK, 0700) + + if worklog_home_exists != True: + raise worklog_home_exists + 
+ if not worklog_home_accessible: + raise RuntimeError( + 'Cache directory \'%s\' should have \'rwx\' mode' %\ + self.worklog_home) + + self.mode = cfg.o.get('cache.mode', Cache.DEFAULT_MODE) + self.mode = self.srv_cfg.get('cache.mode', self.mode) + + # update caching mode + if cfg.o.get('query.offline', False): + self.mode = 'offline' + else: + if cfg.o.get('query.online', False): + self.mode = 'online' + flipovers = cfg.o.get('query.flip_online', 0) + if flipovers > 1: + self.mode = 'online' + elif flipovers == 1: + self.mode = 'cached' + + if self.mode == 'offline': + self.source = Cache.SOURCE_CACHE_ONLY + elif self.mode == 'cached': + self.source = Cache.SOURCE_CACHED_SERVER + elif self.mode == 'online': + self.source = Cache.SOURCE_SERVER + else: + raise RuntimeError( + u'Unknown caching mode: \'%s\'' % self.mode) + + self.ttl = cfg.o.get('cache.ttl', Cache.DEFAULT_TTL) + self.ttl = self.srv_cfg.get('cache.ttl', self.ttl) + + self.user = cfg.o.get('user', None) + self.user = self.srv_cfg.get('user', self.user) + if not self.user: + raise RuntimeError( + u'Internal error: unknown user') + + self.variables = { + 'me': self.user, + } + + self.search_online = cfg.o.get('query.search_online', + Cache.DEFAULT_SEARCH_ONLINE) + self.search_online = self.srv_cfg.get('query.search_online', + self.search_online) + + if not self._load_fields() \ + or (Util.is_stale(self.ttl, self.fields_ts) \ + and self.mode != Cache.MODE_OFFLINE): + self._cache_fields() + + if not self._load_createmeta() \ + or (Util.is_stale(self.ttl, self.create_meta_ts) \ + and self.mode != Cache.MODE_OFFLINE): + self._cache_createmeta() + + # resolve field names for query.filter + def resolve_field_name(self, expression): + if not expression: + return expression + if type(expression) not in (list,tuple): + return expression + lhs, op, rhs = expression + if isinstance(lhs, basestring): + new = self.get_field_id(lhs) + if new: + lhs = new + else: + lhs = resolve_field_name(self, lhs) + + if type(rhs) 
!= list: + rhs = resolve_field_name(self, rhs) + + return (lhs, op, rhs) + + filter = resolve_field_name(self, cfg.o.get('query.filter')) + cfg.o.set('query.filter', filter) + + vpre(VERBOSITY_INFO, u'Cache: server=%s, mode=%s' %\ + (self.srv_name, self.mode)) + + + def get_jira(self): + if self.mode == Cache.MODE_OFFLINE: + class Stub(object): + def __init__(self): + self._session = None + self._options = None + + return Stub() + + if self.jira is None: + oauth_token = self.srv_cfg.get('oauth.token') + oauth_secret = self.srv_cfg.get('oauth.secret') + oauth_cert = self.srv_cfg.get('oauth.cert') + user = self.srv_cfg.get('user') + server = self.srv_cfg.get('url') + password = self.srv_cfg.get('password') + + if not server: + raise RuntimeError( + 'URL is missing for server \'%s\'' % self.srv_name) + + if oauth_token and oauth_secret: + #vpre(VERBOSITY_INFO, u'INFO: Using OAuth to connect to JIRA') + if not oauth_cert: + raise RuntimeError( + 'OAuth certificate is missing for server \'%s\'' %\ + self.srv_name) + + options = { 'server': server, 'verify': False } + + oauth_auth = { + 'access_token': oauth_token, + 'access_token_secret': oauth_secret, + 'consumer_key': 'jic-tool', + 'key_cert': oauth_cert + } + self.jira = JIRA(options=options, oauth=oauth_auth) + return self.jira + + if user and not password: + password = self.get_password_from_keyring_or_console( + self.srv_name, user) + + options = { 'server': server, 'verify': False } + if user: + auth = (user, password) + self.jira = JIRA(options=options, basic_auth=auth) + else: + self.jira = JIRA(options=options) + + return self.jira + + + def get(self, issues=None, go_down=False, go_up=False, + down_from=None, up_from=None, inclusive=False, + link_types=None, depth=None, order_by=None, limit=None, + filter=None, query=None, source=None, worklogs=False): + """Get issues from cache/server according to the parameters` + + Parameters: + issues - iterable source of issue keys to return + go_down - also return issues 
down from each of `issues` + entries using the `link_types` for traversal + and going for `depth` steps in the hierarchy + go_up - also return issues up from each of `issues` + entries using the `link_types` for traversal + and going for `depth` steps in the hierarchy + down_from - iterable source of issue keys to traverse the + tree of links from; direction is towards + children; the traveral-starting issue is also + included if `include_self` is True; depth of + traveral is limited by `depth` parameter + up_from - same as for `down_from`, but the direction is + towards parents + inclusive - also include the traversal-starting issue in + results - it's not included by default + link_types - list of link type names to match; only links + with matched type names will be traversed + depth - integer specifying the depth of traversal; 1 + being the traversal-starting issue only, 0 + returning no issues + order_by - iterable with field names to order by, + prefixed by '-' (for descending) or '+' (for + ascending; default) to specify the direction; + if missing - `issues` are returned first, + followed by `down-from` ones and then + `up_from` ones + filter - parsed filter criteria + limit - limit the number of issues returned + query - JQL query + worklogs - also get worklogs (exposed as 'worklogs' + property for each issue returned) + + Returns: + an iterable with Issue objects ordered according to the + request + """ + + if limit: + try: + limit = int(limit) + except TypeError: + limit = -1 + else: + limit = -1 + + link_types = [lt.strip() for lt in link_types if lt] \ + if link_types else None + + if source is None: + source = self.source + + # 1. 
process `issues`, `filter` and `order_by` + if isinstance(issues, basestring): + issues = (issues,) + + ts_now = datetime.datetime.utcnow() + + listed_issues = [] + if issues: + listed_issues = self._get_by_keys( + issues, go_down, go_up, + link_types, depth, order_by, + filter, limit) + elif filter: + listed_issues = self._get_by_filter(filter, order_by, limit) + + downward_issues = [] + if down_from: + if isinstance(down_from, basestring): + down_from = (down_from,) + for issue in down_from: + if inclusive: + if isinstance(issue, basestring): + issues = self.get(issue) + if not len(issues): + continue + issue = issues[0] + downward_issues.append(issue) + linked = self._get_linked_issues( + issue, True, False, link_types, + depth, order_by, limit, filter) + if len(linked): + downward_issues.extend(linked) + + upward_issues = [] + if up_from: + if isinstance(up_from, basestring): + up_from = (up_from,) + for issue in up_from: + if inclusive: + if isinstance(issue, basestring): + issues = self.get(issue) + if not len(issues): + continue + issue = issues[0] + upward_issues.append(issue) + linked = self._get_linked_issues( + issue, False, True, link_types, + depth, order_by, limit, filter) + if len(linked): + upward_issues.extend(linked) + + queried_issues = self._get_by_query(query, limit) \ + if query else [] + + result = listed_issues + downward_issues + upward_issues + \ + queried_issues + + if order_by: + vpre(VERBOSITY_INFO, u'Sorting result set...', end=u'') + result = Util.sort_issues(result, order_by) + vpre(VERBOSITY_INFO, u' - done!') + + # TODO: get worklogs from the cache + if worklogs: + updated = [] + max_idx = len(result) + for idx, issue in enumerate(result, 1): + ts, wl = self._load_worklogs(issue.key) + if Util.is_stale(self.ttl, ts) \ + and self.mode in (Cache.MODE_ONLINE,): + vpre(VERBOSITY_INFO, + u'\rFetching worklogs: %s (%d/%d)%s' %\ + (issue.key, idx, max_idx, u' '*10), + end=u'') + wl = self.get_jira().worklogs(issue.key) + 
self._cache_worklogs(issue, wl) + issue.worklogs = wl + updated.append(issue) + vpre(VERBOSITY_INFO, u' - done!') + result = updated + + return result + + + def get_comment(self, issue, comment_id): + if isinstance(issue, basestring): + issue = self._get_by_keys((issue,)) + if not issue: + return None + issue = issue[0] + for comment in issue.fields.comment.comments: + if comment.id == comment_id: + return comment + return None + + + def create(self, fields): + if self.mode == Cache.MODE_OFFLINE: + raise RuntimeError( + u'Creating issues is only supported in online mode') + new_issue = self.get_jira().create_issue(fields) + self._cache_issue(new_issue) + return new_issue + + + def update(self, changes): + """Update issues in cache or on the server. + + Parameters: + changes - iterable of tuples: + #0: issue key + #1: dictionary with changes + Returns: + nothing + """ + + if self.mode in (Cache.MODE_CACHED, Cache.MODE_OFFLINE): + raise RuntimeError( + u'Only online mode is supported in this version of jic') + + pass + + + def rollback(self, changes): + """Roll changes back in cache or on the server""" + # TODO: implement + raise RuntimeError( + u'Rolling back is not implemented in this version of jic') + + + def pull(self): + """Refresh only the already cached issues from the server""" + + if self.mode in (Cache.MODE_OFFLINE,): + raise RuntimeError( + u'Can\'t pull in offline mode') + + fetched_issues = set() + + done = 1 + + wls = self._cached_worklogs() + total = len(wls) + + issues = self._cached_issues() + total += len(issues) + + for key in wls: + vpre(VERBOSITY_INFO, + u'\rPulling: %s (%d/%d)%s' %\ + (key, done, total, u' '*10), + end='') + self.fetch(key, worklogs=True) + fetched_issues.add(key) + done += 1 + + for key in issues: + vpre(VERBOSITY_INFO, + u'\rPulling: %s (%d/%d)%s' %\ + (key, done, total, u' '*10), + end='') + if key not in fetched_issues: + self.fetch(key) + done += 1 + + vpre(VERBOSITY_INFO, u' - done!') + + + def fetch(self, issues=None, 
go_down=False, go_up=False, + down_from=None, up_from=None, inclusive=False, + link_types=None, depth=None, order_by=None, limit=None, + filter=None, query=None, worklogs=False): + """Fetch issues from the server and update cache""" + + if self.mode == Cache.MODE_OFFLINE: + raise RuntimeError( + u'Fetching is not possible in offline mode') + + if limit: + try: + limit = int(limit) + except TypeError: + limit = -1 + else: + limit = -1 + + link_types = [lt.strip() for lt in link_types if lt] \ + if link_types else None + + if isinstance(issues, basestring): + issues = (issues,) + + ts_now = datetime.datetime.utcnow() + + listed_issues = [] + if issues: + listed_issues = self._get_by_keys( + issues, go_down, go_up, + link_types, depth, order_by, + filter, limit, + source=Cache.SOURCE_SERVER) + elif filter: + listed_issues = self._get_by_filter( + filter, + order_by, + limit, + source=Cache.SOURCE_SERVER) + + downward_issues = [] + if down_from: + if isinstance(down_from, basestring): + down_from = (down_from,) + for issue in down_from: + if inclusive: + if isinstance(issue, basestring): + issues = self.get(issue) + if not len(issues): + continue + issue = issues[0] + downward_issues.append(issue) + linked = self._get_linked_issues( + issue, True, False, link_types, + depth, order_by, limit, filter, + source=Cache.SOURCE_SERVER) + if len(linked): + downward_issues.extend(linked) + + upward_issues = [] + if up_from: + if isinstance(up_from, basestring): + up_from = (up_from,) + for issue in up_from: + if inclusive: + if isinstance(issue, basestring): + issues = self.get(issue) + if not len(issues): + continue + issue = issues[0] + upward_issues.append(issue) + linked = self._get_linked_issues( + issue, False, True, link_types, + depth, order_by, limit, filter, + source=Cache.SOURCE_SERVER) + if len(linked): + upward_issues.extend(linked) + + queried_issues = self._get_by_query(query, limit) \ + if query else [] + + result = listed_issues + downward_issues + 
upward_issues + \ + queried_issues + + if order_by: + #vpre(VERBOSITY_INFO, u'Sorting result set...', end=u'') + result = Util.sort_issues(result, order_by) + #vpre(VERBOSITY_INFO, u' - done!') + + # fetch editmeta + updated = [] + max_idx = len(result) + for idx, issue in enumerate(result, 1): + #vpre(VERBOSITY_INFO, + #u'\rFetching editmeta: %s (%d/%d)%s' %\ + #(issue.key, idx, max_idx, u' '*10), + #end=u'') + em = self.get_jira().editmeta(issue.key) + self._cache_editmeta(issue, em) + issue.editmeta = em + updated.append(issue) + result = updated + #vpre(VERBOSITY_INFO, u' - done!') + + if worklogs: + updated = [] + max_idx = len(result) + for idx, issue in enumerate(result, 1): + #vpre(VERBOSITY_INFO, + #u'\rFetching worklogs: %s (%d/%d)%s' %\ + #(issue.key, idx, max_idx, u' '*10), + #end=u'') + wl = self.get_jira().worklogs(issue.key) + self._cache_worklogs(issue, wl) + issue.worklogs = wl + updated.append(issue) + result = updated + #vpre(VERBOSITY_INFO, u' - done!') + + return result + + + def push(self, issues=None, down_from=None, up_from=None, + inclusive=False, depth=None, order_by=None, limit=None): + """Push cached changes to the server""" + # TODO: implement + raise RuntimeError( + u'Pushing is not implemented in this version of jic') + + + def forget(self, issues=None, down_from=None, up_from=None, + inclusive=False, depth=None, order_by=None, limit=None): + """Remove issues' information from the cache""" + # TODO: implement + raise RuntimeError( + u'Forgetting is not implemented in this version of jic') + + + def stat(self): + """Get cache statistics""" + # TODO: implement + return None + + + def add_comment(self, issue, comment_body): + if self.mode == Cache.MODE_OFFLINE: + raise RuntimeError( + u'Adding comments in offline mode not supported yet') + + if not isinstance(issue, basestring): + issue = issue.key + + try: + vpre(VERBOSITY_INFO, + u'Adding a comment for %s...' 
% issue, + end=u'') + comment = self.get_jira().add_comment(issue, comment_body) + vpre(VERBOSITY_INFO, u' - done!') + except JIRAError, e: + pre('ERROR: Unable to add comment for issue \'%s\': %s' % (\ + issue, str(e))) + vpre(VERBOSITY_INFO, + u'Fetching the update version of %s...' % issue, + end=u'') + self.fetch(issue) + vpre(VERBOSITY_INFO, u' - done!') + + + def get_field_id(self, partial_name): + if not self.fields: + return None + + result = self.field_by_id.get(partial_name) + if result: + return result['id'] + + result = self.field_by_name.get(partial_name) + if result: + return result['id'] + + matches = [] + for name in self.field_by_id.keys(): + if name.find(partial_name) != -1: + matches.append(self.field_by_id[name]['id']) + if len(matches) > 1: + raise RuntimeError( + u'Partially specified field name \'%s\' is ambigous' %\ + partial_name) + if matches: + return matches[0] + + matches = [] + for name in self.field_by_name.keys(): + if name.find(partial_name) != -1: + matches.append(self.field_by_name[name]['id']) + if len(matches) > 1: + raise RuntimeError( + u'Partially specified field name \'%s\' is ambigous' %\ + partial_name) + if matches: + return matches[0] + + return None + + + def _load_fields(self): + file_name = self.srv_home + os.sep + Cache.FIELD_CACHE + + self.fields = () + self.fields_ts = None + + try: + vpre(VERBOSITY_INFO, + u'Loading field information from the cache...', end=u'') + f = open(file_name, 'r') + timestamp = datetime.datetime.strptime( + f.readline().strip(), Cache.TS_FORMAT) + raw = json.load(f) + f.close() + vpre(VERBOSITY_INFO, u' - done!') + + self.fields = FieldMetadata(raw) + self.fields_ts = timestamp + + except IOError, e: + # TODO: properly handle + if e.errno != errno.ENOENT: + vpre(VERBOSITY_ERRORS, + u'ERROR: unable to load cached field '\ + u'information: %s' % str(e)) + return False + + except Exception, e: + # TODO: properly handle + if e.errno != errno.ENOENT: + vpre(VERBOSITY_ERRORS, + u'ERROR: unable 
to load cached field '\ + u'information: %s' % str(e)) + return False + + return True + + + def _cache_fields(self): + try: + self.fields = [ + { 'id': 'key', + 'name': 'Key', + 'schema': { + 'type': 'string', + 'system': 'key',} + }, ] + if self.mode != Cache.MODE_OFFLINE: + vpre(VERBOSITY_INFO, + u'Fetching field information...', + end=u'') + self.fields.extend(self.get_jira().fields()) + self.fields = FieldMetadata(self.fields) + self.fields_ts = datetime.datetime.utcnow() + + if self.mode != Cache.MODE_OFFLINE: + file_name = self.srv_home + os.sep + Cache.FIELD_CACHE + tmp_file_name = self.issue_home + os.sep + '.' + Cache.FIELD_CACHE + + f = open(tmp_file_name, 'w') + f.write(self.fields_ts.strftime('%s\n' % Cache.TS_FORMAT)) + f.write(json.dumps(self.fields.field_meta, indent=2, sort_keys=True)) + f.close() + os.rename(tmp_file_name, file_name) + vpre(VERBOSITY_INFO, u' - done!') + + except JIRAError, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: unable to fetch field list '\ + u'from the server \'%s\': %s' % (self.srv_name, str(e))) + return False + + except IOError, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: unable to store field list: %s' % str(e)) + return False + + except Exception, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: error fetching field list: %s' % str(e)) + return False + + return True + + + def _load_createmeta(self): + file_name = \ + self.srv_home + os.sep + Cache.ISSUE_CREATEMETA_CACHE + + self.create_meta = () + self.create_meta_ts = None + + try: + vpre(VERBOSITY_INFO, + u'Loading createmeta information from the cache...', end=u'') + f = open(file_name, 'r') + timestamp = datetime.datetime.strptime( + f.readline().strip(), Cache.TS_FORMAT) + raw = json.load(f) + f.close() + vpre(VERBOSITY_INFO, u' - done!') + + self.create_meta = CreateIssueMetadata(raw) + self.create_meta_ts = timestamp + + except IOError, e: + # TODO: properly handle + if e.errno != errno.ENOENT: + vpre(VERBOSITY_ERRORS, + u'ERROR: unable to load cached createmeta '\ + u'information: 
%s' % str(e)) + return False + + except Exception, e: + # TODO: properly handle + if e.errno != errno.ENOENT: + vpre(VERBOSITY_ERRORS, + u'ERROR: unable to load cached createmeta '\ + u'information: %s' % str(e)) + return False + + return True + + def get_password_from_keyring_or_console(self, server, user): + + name = "jic.server.%s" %(server) + + try: + from keyring import get_password, set_password + password = get_password(name, user) + except ImportError: + vpre(VERBOSITY_INFO, + u'keyring module not installed, getting password from console', + end=u'') + password = None + + if not password: + password = self.get_password_from_console(server, user) + + try: + set_password(name, user, password) + except: + pass + return password + + + def get_password_from_console(self, server, user): + + #vpre(VERBOSITY_INFO, u'INFO: Using user/password to connect to JIRA') + if not sys.stdin.isatty(): + raise RuntimeError( + u'Password or oauth token/secret are missing '\ + u'for server \'%s\'' % server) + pre(u'Please enter password for %s at %s.' % (\ + user, server)) + try: + password = getpass(u'password: ') + except EOFError: + raise RuntimeError( + u'Password is missing for server \'%s\'' %\ + server) + return password + + + def _cache_createmeta(self): + try: + self.create_meta = CreateIssueMetadata({ 'projects': () }) + if self.mode != Cache.MODE_OFFLINE: + vpre(VERBOSITY_INFO, + u'Fetching createmeta information...', + end=u'') + self.create_meta = CreateIssueMetadata( + self.get_jira().createmeta( + expand='projects.issuetypes.fields')) + self.create_meta_ts = datetime.datetime.utcnow() + + if self.mode != Cache.MODE_OFFLINE: + file_name = \ + self.srv_home + os.sep + Cache.ISSUE_CREATEMETA_CACHE + tmp_file_name = \ + self.issue_home + os.sep + '.' 
+ Cache.ISSUE_CREATEMETA_CACHE + + f = open(tmp_file_name, 'w') + f.write(self.create_meta_ts.strftime('%s\n' % Cache.TS_FORMAT)) + f.write(json.dumps(self.create_meta.create_meta, + indent=2, sort_keys=True)) + f.close() + os.rename(tmp_file_name, file_name) + vpre(VERBOSITY_INFO, u' - done!') + + except JIRAError, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: unable to fetch createmeta information '\ + u'from the server \'%s\': %s' % (self.srv_name, str(e))) + return False + + except IOError, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: unable to store createmeta information: %s' % str(e)) + return False + + except Exception, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: error fetching createmeta information: %s' % str(e)) + return False + + return True + + + def _cache_issues(self, issues): + #vpre(VERBOSITY_INFO, + #u'Caching %d issues...' % len(issues), + #end=u'') + for issue in issues: + self._cache_issue(issue) + #vpre(VERBOSITY_INFO, u' - done!') + + + def _cache_issue(self, issue): + key = Util.get_nested_value(issue, 'key') + if not key: + return False + + ts = datetime.datetime.utcnow() + + self.issue_L1[key] = (ts, issue) + + file_name = self.issue_home + os.sep + str(key) + tmp_file_name = self.issue_home + os.sep + '.' 
+ str(key) + + try: + f = open(tmp_file_name, 'w') + f.write(ts.strftime('%s\n' % Cache.TS_FORMAT)) + f.write(json.dumps(issue.raw, indent=2, sort_keys=True)) + f.close() + os.rename(tmp_file_name, file_name) + + except IOError, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: unable to cache the issue %s: %s' %\ + (issue.key, str(e))) + return False + + except Exception, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: error caching the issue %s: %s' %\ + (issue.key, str(e))) + return False + + return True + + + def _cache_editmeta(self, issue, editmeta): + if not isinstance(issue, basestring): + issue = Util.get_nested_value(issue, 'key') + + if not issue or not editmeta: + return False + + ts = datetime.datetime.utcnow() + + self.editmeta_L1[issue] = (ts, editmeta) + + file_name = self.editmeta_home + os.sep + str(issue) + tmp_file_name = self.editmeta_home + os.sep + '.' + str(issue) + + try: + f = open(tmp_file_name, 'w') + f.write(ts.strftime('%s\n' % Cache.TS_FORMAT)) + f.write(json.dumps(editmeta, indent=2, sort_keys=True)) + f.close() + os.rename(tmp_file_name, file_name) + + except IOError, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: unable to cache editmeta for the issue %s: %s' %\ + (issue.key, str(e))) + return False + + except Exception, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: error caching editmeta for the issue %s: %s' %\ + (issue.key, str(e))) + return False + + return True + + + def _cache_worklogs(self, issue, worklogs): + if not isinstance(issue, basestring): + issue = Util.get_nested_value(issue, 'key') + + if not issue or not worklogs: + return False + + if type(worklogs) not in (list,tuple): + worklogs = (worklogs,) + + ts = datetime.datetime.utcnow() + + self.worklog_L1[issue] = (ts, worklogs) + + file_name = self.worklog_home + os.sep + str(issue) + tmp_file_name = self.issue_home + os.sep + '.' 
+ str(issue) + + try: + f = open(tmp_file_name, 'w') + f.write(ts.strftime('%s\n' % Cache.TS_FORMAT)) + f.write(json.dumps( + [item.raw for item in worklogs], + indent=2, sort_keys=True)) + f.close() + os.rename(tmp_file_name, file_name) + + except IOError, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: unable to cache worklogs for the issue %s: %s' %\ + (issue.key, str(e))) + return False + + except Exception, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: error caching worklogs for the issue %s: %s' %\ + (issue.key, str(e))) + return False + + return True + + + def _load_issue(self, issue_key): + if not issue_key: + return (None, None) + + L1_cached = self.issue_L1.get(issue_key) + if L1_cached: + return L1_cached + + file_name = self.issue_home + os.sep + str(issue_key) + + try: + f = open(file_name, 'r') + timestamp = f.readline().strip() + raw = json.load(f) + f.close() + + issue = Issue(self.get_jira()._options, self.get_jira()._session, raw) + + result = ( + datetime.datetime.strptime(timestamp, Cache.TS_FORMAT), + issue + ) + + self.issue_L1[issue.key] = result + + return result + + except IOError, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: unable to load the issue %s: %s' %\ + (issue.key, str(e))) + return (None, None) + + except Exception, e: + # TODO: properly handle + vpre(VERBOSITY_ERRORS, + u'ERROR: error loading the issue %s: %s' %\ + (issue.key, str(e))) + return (None, None) + + return None, None + + + def _load_editmeta(self, issue): + if not issue: + return (None, None) + + if not isinstance(issue, basestring): + issue = issue.key + + L1_cached = self.editmeta_L1.get(issue) + if L1_cached: + return L1_cached + + file_name = self.editmeta_home + os.sep + str(issue) + + try: + f = open(file_name, 'r') + timestamp = f.readline().strip() + editmeta = json.load(f) + f.close() + + result = ( + datetime.datetime.strptime(timestamp, Cache.TS_FORMAT), + editmeta + ) + + self.editmeta_L1[issue] = result + + return result + + except IOError, e: + vpre(VERBOSITY_ERRORS, + 
u'ERROR: unable to load editmeta for the issue %s: %s' %\ + (issue.key, str(e))) + return (None, None) + + except Exception, e: + # TODO: properly handle + vpre(VERBOSITY_ERRORS, + u'ERROR: error loading editmeta for the issue %s: %s' %\ + (issue.key, str(e))) + return (None, None) + + return None, None + + + def _load_worklogs(self, issue): + if not issue: + return (None, None) + + if not isinstance(issue, basestring): + issue = issue.key + + L1_cached = self.worklog_L1.get(issue) + if L1_cached: + return L1_cached + + file_name = self.worklog_home + os.sep + str(issue) + + try: + f = open(file_name, 'r') + timestamp = f.readline().strip() + raw = json.load(f) + f.close() + + worklogs = [ + Worklog(self.get_jira()._options, self.get_jira()._session, worklog_raw) \ + for worklog_raw in raw] + + result = ( + datetime.datetime.strptime(timestamp, Cache.TS_FORMAT), + worklogs + ) + + self.worklog_L1[issue] = result + + return result + + except IOError, e: + #vpre(VERBOSITY_ERRORS, + #u'ERROR: unable to load worklogs for the issue %s: %s' %\ + #(issue, str(e))) + return (None, None) + + except Exception, e: + # TODO: properly handle + vpre(VERBOSITY_ERRORS, + u'ERROR: error loading worklogs for the issue %s: %s' %\ + (issue.key, str(e))) + return (None, None) + + return None, None + + + def _cached_issues(self): + + for (_, _, filenames) in os.walk(self.issue_home): + break + + return [name for name in filenames if Util.is_issue_key(name)] + + def _cached_worklogs(self): + + for (_, _, filenames) in os.walk(self.worklog_home): + break + + return [name for name in filenames if Util.is_issue_key(name)] + + + def _get_by_keys(self, keys, go_down=False, go_up=False, + link_types=None, depth=None, order_by=None, + filter=None, limit=-1, source=None): + + if not keys or not len(keys): + return () + + ts_now = datetime.datetime.utcnow() + + if source is None: + source = self.source + + result_issues = 0 + + issues_to_fetch = [] + fresh_issues = [] + + for key in keys: + if 
source == Cache.SOURCE_SERVER \ + or (source == Cache.SOURCE_CACHED_SERVER \ + and self.search_online): + issues_to_fetch.append(key) + result_issues += 1 + if limit > 0 and limit <= result_issues: + break + else: + ts, issue = self._load_issue(key) + if not issue: + continue + if source == Cache.SOURCE_CACHED_SERVER: + if Util.is_stale(self.ttl, ts): + issues_to_fetch.append(key) + result_issues += 1 + if limit > 0 and limit <= result_issues: + break + else: + fresh_issues.append(issue) + result_issues += 1 + if limit > 0 and limit <= result_issues: + break + if go_down or go_up: + issues = \ + self._get_linked_issues( + issue, go_down, go_up, link_types, + depth, order_by, filter, + (limit - result_issues) \ + if limit > 0 and not order_by \ + else -1, + source=source) + for issue in issues: + fresh_issues.append(issue) + result_issues += 1 + elif source == Cache.SOURCE_CACHE_ONLY: + fresh_issues.append(issue) + result_issues += 1 + if limit > 0 and limit <= result_issues: + break + if go_down or go_up: + issues = \ + self._get_linked_issues( + issue, go_down, go_up, link_types, + depth, order_by, filter, + (limit - result_issues) \ + if limit > 0 and not order_by \ + else -1, + source=source) + for issue in issues: + fresh_issues.append(issue) + result_issues += 1 + else: + raise RuntimeError( + u'Internal error: unknown source for Cache') + + if issues_to_fetch: + jql = u'key in (%s)' % u','.join(issues_to_fetch) + if filter: + jql += u' and %s' % JQL.emit_filter(filter) + if order_by: + jql += JQL.emit_order_by(order_by) + + try: + #vpre(VERBOSITY_INFO, + #u'Fetching %d issues...' % len(issues_to_fetch), + #end=u'') + issues = self.get_jira().search_issues( + jql, fields='*all', + expand='changelog', + maxResults=\ + (limit - result_issues) \ + if limit > 0 and not order_by \ + else -1) + #vpre(VERBOSITY_INFO, u' - done!') + issues_to_fetch = [] + #vpre(VERBOSITY_INFO, + #u'Caching %d issues...' 
% len(issues), + #end=u'') + for issue in issues: + issue.worklogs = None + issues_to_fetch.append(issue) + self._cache_issue(issue) + result_issues += 1 + #vpre(VERBOSITY_INFO, u' - done!') + + if go_down or go_up: + for linked_issue in self._get_linked_issues( + issue, go_down, go_up, link_types, + depth, order_by, filter, + (limit - result_issues) \ + if limit > 0 and not order_by \ + else -1, + source=source): + issues_to_fetch.append(issue) + result_issues += 1 + + except JIRAError, e: + #sc = int(Util.get_nested_value(e, 'status_code', 0)) + #if sc >= 400 and sc < 500: + #pre(u'ERROR: Issue%s \'%s\' not found.' % ( \ + #u's' if len(issues) > 1 else u'', + #u', '.join(issues))) + # TODO: should we abort in this case? + #return () + #else: + raise RuntimeError(str(e)) + + # merge two lists + result = fresh_issues + issues_to_fetch + + if order_by: + vpre(VERBOSITY_INFO, u'Sorting result set...', end=u'') + result = Util.sort_issues(result, order_by) + vpre(VERBOSITY_INFO, u' - done!') + + if filter: + vpre(VERBOSITY_INFO, u'Filtering result set...', end=u'') + result = [issue for issue in result \ + if Util.issue_matches_filter( + issue, self.variables, filter)] + vpre(VERBOSITY_INFO, u' - done!') + + return result[:limit] if limit > 0 else result + + + def _get_by_filter(self, filter, order_by=None, limit=-1, source=None): + if not filter: + raise RuntimeError( + u'Internal error: empty filter for get_by_filter') + + if source is None: + source = self.source + + result = [] + + if source == Cache.SOURCE_SERVER \ + or (source == Cache.SOURCE_CACHED_SERVER \ + and self.search_online): + jql = JQL.emit_filter(filter, self.variables) + if order_by: + jql += u' %s' % JQL.emit_order_by(order_by) + + try: + vpre(VERBOSITY_INFO, + u'Fetching issues using JQL: \'%s\'...' 
% jql, + end=u'') + issues = self.get_jira().search_issues( + jql, fields='*all', + expand='changelog', + maxResults=limit) + vpre(VERBOSITY_INFO, u' - done!') + vpre(VERBOSITY_INFO, + u'Caching %d issues...' % len(issues), + end=u'') + for issue in issues: + issue.worklogs = None + result.append(issue) + self._cache_issue(issue) + vpre(VERBOSITY_INFO, u' - done!') + + except JIRAError, e: + sc = int(Util.get_nested_value(e, 'status_code', 0)) + if sc >= 400 and sc < 500: + vpre(VERBOSITY_ERRORS, u'ERROR: Issues not found.') + # TODO: should we abort in this case? + #return () + else: + raise RuntimeError(str(e)) + + elif source == Cache.SOURCE_CACHED_SERVER \ + or source == Cache.SOURCE_CACHE_ONLY: + cached_issues = self._cached_issues() + vpre(VERBOSITY_INFO, + u'Loading issues using filter...', + end=u'') + for key in cached_issues: + _, issue = self._load_issue(key) + if Util.issue_matches_filter(issue, self.variables, filter): + result.append(issue) + if limit > 0 \ + and not order_by \ + and len(result) >= limit: + break + vpre(VERBOSITY_INFO, u' - done!') + + if order_by: + vpre(VERBOSITY_INFO, u'Sorting result set...', end=u'') + result = Util.sort_issues(result, order_by) + vpre(VERBOSITY_INFO, u' - done!') + + else: + raise RuntimeError(u'Internal error: unknown mode') + + return result[:limit] if limit > 0 else result + + + def _get_by_query(self, query, limit=-1): + if not query: + raise RuntimeError( + u'Internal error: empty filter for get_by_query') + + result = [] + issues = () + + for jql in query: + + if self.source == Cache.SOURCE_SERVER \ + or (self.source == Cache.SOURCE_CACHED_SERVER \ + and self.search_online): + try: + vpre(VERBOSITY_INFO, + u'Fetching issues using JQL: \'%s\'...' % jql, + end=u'') + issues = self.get_jira().search_issues( + jql, fields='*all', + expand='changelog', + maxResults=limit) + vpre(VERBOSITY_INFO, u' - done!') + vpre(VERBOSITY_INFO, + u'Caching %d issues...' 
% len(issues), + end=u'') + for issue in issues: + issue.worklogs = None + result.append(issue) + self._cache_issue(issue) + vpre(VERBOSITY_INFO, u' - done!') + + except JIRAError, e: + sc = int(Util.get_nested_value(e, 'status_code', 0)) + if sc >= 400 and sc < 500: + vpre(VERBOSITY_ERRORS, + u'ERROR: %s' % str(e.text)) + #vpre(VERBOSITY_ERRORS, + #u'ERROR: Issue%s \'%s\' not found.' % ( \ + #u's' if len(issues) > 1 else u'', + #u', '.join(issues))) + # TODO: should we abort in this case? + #return () + else: + raise RuntimeError(str(e)) + + elif self.source == Cache.SOURCE_CACHED_SERVER \ + or self.source == Cache.SOURCE_CACHE_ONLY: + raise RuntimeError( + u'Offline JQL queries are not supported yet.') + + else: + raise RuntimeError(u'Internal error: unknown mode') + + return result[:limit] if limit > 0 else result + + + def _get_linked_issues(self, pivot_issue, traverse_down=True, + traverse_up=False, link_types=True, + depth=None, order_by=None, filter=None, + limit=None, query=None, source=None): + """Depth first tree traversal""" + if not pivot_issue: + raise RuntimeError( \ + u'Internal error: missing pivot issue') + + if isinstance(pivot_issue, basestring): + issues = self.get(pivot_issue) + if not len(issues): + raise RuntimeError( + u'WARNING: Issue %s not found' % pivot_issue) + pivot_issue = issues[0] + + if source is None: + source = self.source + + if depth is not None and depth <= 0: + depth = None + + queue = [(pivot_issue, traverse_up, traverse_down, depth), ] + visited = set() + result = [] + vpre(VERBOSITY_INFO, + u'Traversing links for the issue %s...' 
% pivot_issue, + end=u'') + while queue: + issue, go_up, go_down, depth = queue.pop(0) + + if isinstance(issue, basestring): + if issue.strip() in visited: + continue + issues = self.get(issues=issue, source=source) + if issues is None: + raise RuntimeError( + u'Unable to retrieve issue %s' % issue) + issue = issues[0] + else: + if issue.key in visited: + continue + + visited.add(issue.key) + if issue.key != pivot_issue.key: + result.append(issue) + + if depth is not None and depth <= 0: + continue + + links = Util.get_nested_value( + issue, 'fields.issuelinks', ()) + + queue_chunk = [] + for link in links: + if link_types and link.type.name not in link_types: + continue + linked_go_up, linked_go_down = go_up, go_down + + child_issue_stub = Util.get_nested_value( + link, 'inwardIssue') + #child_key = Util.get_nested_value( + #link, 'inwardIssue.key') + if go_down and child_issue_stub: + child_issue_stub.stub = True + if child_issue_stub.key == pivot_issue.key: + linked_go_up = False + child_issue = self.get(issues=child_issue_stub.key, + source=source) + if not child_issue: + child_issue = child_issue_stub + else: + child_issue = child_issue[0] + queue_chunk.append( + (child_issue, linked_go_up, + linked_go_down, + depth - 1 if depth else None)) + + parent_issue_stub = Util.get_nested_value( + link, 'outwardIssue') + if go_up and parent_issue_stub: + parent_issue_stub.stub = True + if parent_issue_stub.key == pivot_issue.key: + linked_go_down = False + parent_issue = self.get(issues=parent_issue_stub.key, + source=source) + if not parent_issue: + parent_issue = parent_issue_stub + else: + parent_issue = parent_issue[0] + queue_chunk.append( + (parent_issue, linked_go_up, + linked_go_down, + depth - 1 if depth else None)) + + queue[0:0] = queue_chunk + vpre(VERBOSITY_INFO, u' - done!') + + if order_by: + vpre(VERBOSITY_INFO, u'Sorting result set...', end=u'') + result = Util.sort_issues(result, order_by) + vpre(VERBOSITY_INFO, u' - done!') + + return result + + +# 
class Cache + + +class Template (object): + + HOME = 'templates' + DEFAULT_TEMPLATE_NAME = 'default' + DEFAULT_FIELD_DELIMITER = u' - ' + DEFAULT_MINIMAL_ITEM_WIDTH = 8 + DEFAULT_OUTPUT_WIDTH = 72 + + def __init__(self, cfg, cache=None): + self.cfg = cfg + self.cache = cache + self.home = os.path.expanduser(os.path.expandvars( + cfg.o.home.location + os.sep + Template.HOME)) + + home_exists, home_accessible = Util.ensure_dir_access( + self.home, os.R_OK | os.W_OK | os.X_OK, 0700) + + if home_exists != True: + raise home_exists + + if not home_accessible: + raise RuntimeError( + 'Template directory \'%s\' should have \'rwx\' mode' % self.home) + + # set default formatters + self.init = Template.Default.init + self.format_value = Template.Default.format_value + self.format_field_name = Template.Default.format_field_name + self.format_field_value = Template.Default.format_field_value + self.get_issue_type_icon = Template.Default.get_issue_type_icon + self.print_issue_list = Template.Default.print_issue_list + self.format_issue_list_item = \ + Template.Default.format_issue_list_item + self.print_issue_tree = Template.Default.print_issue_tree + self.issue_tree_item = Template.Default.format_issue_tree_item + self.format_issue_header = Template.Default.format_issue_header + self.format_all_issue_fields = \ + Template.Default.format_all_issue_fields + self.print_issue_links = Template.Default.print_issue_links + self.format_issue_links_item = \ + Template.Default.format_issue_links_item + self.print_issue_comments = \ + Template.Default.print_issue_comments + self.format_issue_comments_item = \ + Template.Default.format_issue_comments_item + self.print_issue_history = Template.Default.print_issue_history + self.format_issue_history_item = \ + Template.Default.format_issue_history_item + self.print_issue_worklog = Template.Default.print_issue_worklog + self.format_issue_worklog_item = \ + Template.Default.format_issue_worklog_item + self.format_server_list_item = \ + 
Template.Default.format_server_list_item + self.format_server = Template.Default.format_server + self.convert_issue_fields_for_editing = \ + Template.Default.convert_issue_fields_for_editing + self.generate_issue_fields_for_creation = \ + Template.Default.generate_issue_fields_for_creation + self.convert_issue_fields_after_editing = \ + Template.Default.convert_issue_fields_after_editing + self.get_field_list_for_issue_creation = \ + Template.Default.get_field_list_for_issue_creation + self.get_field_list_for_issue_editing = \ + Template.Default.get_field_list_for_issue_editing + self.get_field_list_to_force_jicML_markers = \ + Template.Default.get_field_list_to_force_jicML_markers + self.format_issue_field_meta = \ + Template.Default.format_issue_field_meta + self.get_new_issue_field_values = \ + Template.Default.get_new_issue_field_values + + template = cfg.o.get('display.template') + if not template: + vpre(VERBOSITY_INFO, u'Using default template.') + else: + vpre(VERBOSITY_INFO, + u'Loading template: \'%s\'...' % template, + end=u'') + filenames = () + for (dirpath, dirnames, filenames) in os.walk(self.home): + break + if template not in filenames: + vpre(VERBOSITY_WARNINGS, + u'\nWARNING: Template \'%s\' not found, using default one.' 
%\ + template) + else: + try: + options = Namespace() + parsed = {} + publics = { + 'template': self, + 'def_template': Template.Default, + 'util': Util, + } + execfile(self.home + os.sep + template, publics) + + except Exception, e: + # TODO: report properly + vpre(VERBOSITY_WARNINGS, + u'WARNING: Unable to load template \'%s\': %s' %\ + (template, unicode(e))) + vpre(VERBOSITY_INFO, u' - done!') + + self.field_delimiter = cfg.o.get('display.field_delimiter', + Template.DEFAULT_FIELD_DELIMITER) + + self.minimal_item_width = Template.DEFAULT_MINIMAL_ITEM_WIDTH + self.output_width = cfg.o.get('display.output_width', + Template.DEFAULT_OUTPUT_WIDTH) + + if cache: + self.field_by_id = cache.field_by_id + self.field_by_name = cache.field_by_name + else: + self.field_by_id = {} + self.field_by_name = {} + + # adjust template using command line options + if cache: + fields = cfg.o.get('display.fields') + if fields: + resolved_fields = [] + for name in fields: + resolved_name = cache.get_field_id(name) + resolved_fields.append( + resolved_name if resolved_name else name) + self.fields = resolved_fields + else: + self.fields = fields + + + def get_output_width(self): + try: + return int(os.environ.get('COLUMNS', self.output_width)) + except Exception: + return self.output_width + + + def _get_field_name(self, field_id): + return self.cache.fields.get_field_name(field_id, field_id) + + + def _get_field_id(self, field_name): + return self.field_by_name.get(field_name) + + + # default template implementation + class Default (object): + + DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S' + + @staticmethod + def init(tpl): + return + + @staticmethod + def format_value(value): + if isinstance(value, basestring): + return value + + if value is None: + return u'' + + t = type(value) + if t == datetime.datetime: + return value.strftime(Template.Default.DATETIME_FORMAT) + + elif t in (tuple, list): + return u', '.join([Template.Default.format_value(item) \ + for item in value]) + + elif t == 
dict: + return u', '.join( \ + [u'%s: %s' %(name, val) \ + for name, val in value.iteritems()]) + + elif t in (Status, Component, Version, Project, IssueType, + Resolution): + return value.name + + elif t == User: + name = value.name + disp_name = value.displayName + email = value.emailAddress + if disp_name: + str_user = '%s (%s) <%s>' % (disp_name, name, email) + else: + str_user = email + return str_user + + else: + return str(value) + + + @staticmethod + def format_field_name(tpl, issue, field_name): + return tpl._get_field_name(field_name) + + + @staticmethod + def format_field_value(tpl, issue, field_name, value): + return tpl.format_value(value) + + + @staticmethod + def format_issue_from_fieldlist(tpl, issue, fields, width=None): + if width is None: + width = tpl.get_output_width() + values = [] + first_field = None + for field in fields: + if field[0] == '$': + function = tpl.__dict__.get(field[1:], None) + if function is None: + value = '!function %s not found!' % field[1:] + value = function(tpl, issue) + else: + value = Util.get_issue_field_value(issue, field) + if first_field is None: + first_field = value + tpl.field_delimiter + value = tpl.format_field_value(tpl, issue, field, value) + values.append(value) + if width and width > 0: + return textwrap.wrap( \ + tpl.field_delimiter.join(values), + width, + break_long_words=True, + subsequent_indent=u' ' * len(first_field)) + else: + return (tpl.field_delimiter.join(values),) + + + @staticmethod + def get_issue_type_icon(tpl, issue): + issue_type = Util.get_nested_value( + issue, 'fields.issuetype.name', '') + issue_summary = Util.get_nested_value( + issue, 'fields.summary', '') + if issue_type == u'Sub-task': + return u'➏' + elif issue_type == u'Blueprint': + return u'➎' + elif issue_type == u'Engineering card': + return u'➃' + elif issue_type == u'Link': + return u'➃' + elif issue_type == u'Roadmap Card': + if issue_summary.lower().startswith(u'epic:'): + return u'➁' + else: + return u'➂' + elif 
issue_type == u'Request': + return u'➀' + elif issue_type == u'New Feature': + return u'➀' + elif issue_type == u'Bug': + return u'➑' + + return issue_type + + + @staticmethod + def print_issue_list(tpl, issues, printer=None, width=None): + if printer == None: + printer = pr + for issue in issues: + for line in tpl.format_issue_list_item(tpl, issue, width): + printer(line) + + + @staticmethod + def format_issue_list_item(tpl, issue, width=None): + if tpl.fields: + return Template.Default.\ + format_issue_from_fieldlist(tpl, issue, tpl.fields) + if width is None: + width = tpl.get_output_width() + + type_icon = tpl.get_issue_type_icon(tpl, issue) + key = Util.get_nested_value( + issue, 'key', 'N/A') + summary = Util.get_nested_value( + issue, 'fields.summary', 'N/A') + status = Util.get_nested_value( + issue, 'fields.status.name', 'N/A') + resolution = Util.get_nested_value( + issue, 'fields.resolution.name') + if resolution: + status += u' / ' + issue.fields.resolution.name + + icon_and_key = len(type_icon) + 1 + len(key) + \ + len(tpl.field_delimiter) + all_but_summary = icon_and_key + \ + len(status) + len(tpl.field_delimiter) + + available_width = width + + delta = available_width - all_but_summary + if delta < tpl.minimal_item_width: + return textwrap.wrap( \ + u'%s %s%s%s' % ( + tpl.format_field_value(tpl, issue, 'key', key), + tpl.format_field_value(tpl, issue, 'summary', summary), + tpl.field_delimiter, + tpl.format_field_value(tpl, issue, 'status', status))) + else: + wrapped_summary = textwrap.wrap(summary, delta, + break_long_words=True) + result = [u'%s %s%s%s%s%s' % ( + type_icon, + tpl.format_field_value(tpl, issue, 'key', key), + tpl.field_delimiter, + tpl.format_field_value(tpl, issue, 'summary', wrapped_summary[0]), + tpl.field_delimiter, + tpl.format_field_value(tpl, issue, 'status', status)),] + for line in wrapped_summary[1:]: + result.append( + tpl.format_field_value(tpl, issue, 'status', + u'%s%s' % ( + u' ' * icon_and_key, + line))) + + return 
result + + + ISSUE_FIELDS = ( + # field name is a massive text + ('key', False), + ('issuetype', False), + ('summary', False), + ('status', False), + ('resolution', False), + ('resolutiondate', False), + ('assignee', False), + ('updated', False), + ('fixVersions', False), + ('reporter', False), + ('created', False), + ('project', False), + ('components', False), + ('labels', False), + ('customfield_10204', True), + ('URL', False), + ('description', True), + ) + + @staticmethod + def format_issue_fields(tpl, issue, + field_defs=ISSUE_FIELDS, width=None): + if width is None: + width = tpl.get_output_width() + + result = u'' + + for field, multiline in field_defs: + value = Util.get_issue_field_value(issue, field, True) + if not value: + continue + f_field = tpl.format_field_name(tpl, issue, field) + f_value = tpl.format_field_value(tpl, issue, field, value) + result += jicML.emit_value(f_value, f_field, width, + force_markers=multiline)\ + + u'\n' + + return result + + + @staticmethod + def format_all_issue_fields(tpl, issue, width=None): + return Template.Default.format_issue_fields( + tpl, issue, Template.Default.ISSUE_FIELDS, width) + + + @staticmethod + def format_issue_header(tpl, issue, width=None): + if width is None: + width = tpl.get_output_width() + return Template.Default.format_issue_fields( + tpl, issue, Template.Default.ISSUE_FIELDS[:5], + width) + + + @staticmethod + def print_issue_links(tpl, issue, printer=None, width=None): + if printer == None: + printer = pr + if width is None: + width = tpl.get_output_width() + + links = issue.fields.issuelinks + name = 'Links (%d)' % len(links) + name = tpl.format_field_name(tpl, issue, name) + printer(u'\n%s:\n' % name) + first = True + for link in links: + printer(tpl.format_issue_links_item(tpl, issue, link, width), + end='') + + + @staticmethod + def format_issue_links_item(tpl, issue, link, width=None): + if width is None: + width = tpl.get_output_width() + child = Util.get_nested_value(link, 'inwardIssue') 
+ if child: + icon = u'-' + name = link.type.inward + other = child + else: + parent = Util.get_nested_value(link, 'outwardIssue') + icon = u'+' + name = link.type.outward + other = parent + + prefix = u'%s %s: ' % (icon, name) + + text = prefix + \ + ''.join(tpl.format_issue_list_item(tpl, other)) + result = u'' + + for line in textwrap.wrap( + text, + width, + subsequent_indent=u' ' * len(prefix)): + result += line + u'\n' + + return result + + + @staticmethod + def print_issue_comments(tpl, issue, printer=None, width=None): + if printer == None: + printer = pr + if width is None: + width = tpl.get_output_width() + + comments = issue.fields.comment.comments + name = 'Comments (%d)' % len(comments) + name = tpl.format_field_name(tpl, issue, name) + printer(u'\n%s:\n' % name) + first = True + for comment in comments: + if first: + first = False + else: + printer('') + printer(tpl.format_issue_comments_item(tpl, issue, comment, width), + end='') + + + @staticmethod + def format_issue_comments_item(tpl, issue, comment, width=None, + show_issue_key=False): + if width is None: + width = tpl.get_output_width() + created = Util.get_issue_field_value(comment, 'created') + updated = Util.get_issue_field_value(comment, 'updated') + id_str = u'[%s%s] ' % ( + (u'%s:' % issue.key) if show_issue_key else u'', + comment.id) + author = comment.author + result = u'' + if created != updated: + editor = comment.updateAuthor + text = u'%sOn %s, %s wrote and on %s, %s updated:\n' % ( + id_str, + Template.Default.format_field_value(tpl, + issue, 'created', created), + Template.Default.format_field_value(tpl, + issue, 'author', author), + Template.Default.format_field_value(tpl, + issue, 'updated', updated), + Template.Default.format_field_value(tpl, + issue, 'editor', editor)) + else: + text = u'%sOn %s, %s wrote:\n' % ( + id_str, + Template.Default.format_field_value(tpl, + issue, 'created', created), + Template.Default.format_field_value(tpl, + issue, 'author', author)) + for line in 
textwrap.wrap( + text, + width, + break_long_words=True, + subsequent_indent=u' ' * len(id_str)): + result += line + u'\n' + + + for line in textwrap.wrap( + comment.body, + width, + break_long_words=True, + replace_whitespace=False): + result += u'%s\n' % line + + return result + + + @staticmethod + def print_issue_history(tpl, issue, printer=None, width=None): + if printer == None: + printer = pr + if width is None: + width = tpl.get_output_width() + + changes = Util.get_issue_field_value(issue, + 'changelog.histories') + name = 'Changes (%d)' % len(changes) + name = tpl.format_field_name(tpl, issue, name) + printer(u'\n%s:\n' % name) + if not changes: + printer(u'no changes') + return + + first = True + for change in changes: + if first: + first = False + else: + printer('') + printer(tpl.format_issue_history_item(tpl, issue, change, width), + end='') + + + @staticmethod + def format_issue_history_item(tpl, issue, change, width=None): + if width is None: + width = tpl.get_output_width() + when = Util.get_issue_field_value(change, 'created') + who = Util.get_issue_field_value(change, 'author') + id_str = u'[%s] ' % change.id + text = u'%sOn %s, %s changed:\n' % ( + id_str, + Template.Default.format_field_value(tpl, issue, 'created', when), + Template.Default.format_field_value(tpl, issue, 'author', who)) + result = u'' + for line in textwrap.wrap( + text, + width, + break_long_words=True, + subsequent_indent=u' ' * len(id_str)): + result += line + u'\n' + + first = True + for change in change.items: + if first: + first = False + else: + result += u'\n' + what = change.field + from_value = change.fromString \ + if change.fromString is not None \ + else change.__dict__['from'] + from_value = u'' if from_value is None else from_value + from_value = from_value.splitlines() + to_value = change.toString \ + if change.toString is not None \ + else change.to + to_value = u'' if to_value is None else to_value + to_value = to_value.splitlines() + + skipping = True + for 
line in difflib.unified_diff(from_value, to_value): + if not skipping: + result += line + u'\n' + elif line.startswith(u'@@'): + skipping = False + result += u'%s %s\n' % (line.strip(), what) + + return result + + + @staticmethod + def print_issue_worklog(tpl, issue, printer=None, width=None): + if printer == None: + printer = pr + if width is None: + width = tpl.get_output_width() + + worklogs = issue.worklogs + if not worklogs: + worklogs = () + + name = 'Worklog (%d)' % len(worklogs) + name = tpl.format_field_name(tpl, issue, name) + printer(u'\n%s:\n' % name) + if not worklogs: + printer(u'no items') + return + + for wl in worklogs: + printer(tpl.format_issue_worklog_item(tpl, issue, wl)) + + + @staticmethod + def format_issue_worklog_item(tpl, issue, worklog_item, + width=None): + if width is None: + width = tpl.get_output_width() + + result = u'' + + id_str = u'[%s] ' % worklog_item.id + author = Util.get_nested_value(worklog_item, 'author', True) + created = Util.get_issue_field_value( + worklog_item, 'created') + updated = Util.get_issue_field_value( + worklog_item, 'updated') + started = Util.get_issue_field_value( + worklog_item, 'started') + spent = Util.get_issue_field_value( + worklog_item, 'timeSpent') + seconds_spent = int(Util.get_issue_field_value( + worklog_item, 'timeSpentSeconds')) + comment = Util.get_issue_field_value( + worklog_item, 'comment') + + comment = comment.strip() + + name = Util.get_nested_value(author, 'name') + disp_name = Util.get_nested_value(author, 'displayName') + email = Util.get_nested_value(author, 'emailAddress') + if disp_name: + str_author = u'%s <%s>' % (disp_name, email) + else: + str_author = name + + if created != updated: + editor = Util.get_nested_value(worklog_item, + 'updateAuthor', True) + name = Util.get_nested_value(editor, 'name') + disp_name = Util.get_nested_value(editor, 'displayName') + email = Util.get_nested_value(editor, 'emailAddress') + if disp_name: + str_editor = u'%s <%s>' % (disp_name, email) + 
else: + str_editor = name + text = u'%sOn %s, %s logged and on %s, %s updated %s%s\n' % ( + id_str, + Template.Default.format_field_value(tpl, 'created', created), + str_author, + Template.Default.format_field_value(tpl, 'updated', updated), + str_editor, + spent, + u':' if len(comment) else u'') + else: + text = u'%sOn %s, %s logged %s%s\n' % ( + id_str, + Template.Default.format_field_value(tpl, 'created', created), + str_author, + spent, + u':' if len(comment) else u'') + + for line in textwrap.wrap( + text, + tpl.get_output_width(), + break_long_words=True, + subsequent_indent=u' ' * len(id_str)): + result += line + u'\n' + + + if len(comment): + for line in textwrap.wrap( + comment, + tpl.get_output_width(), + break_long_words=True): + result += u'%s\n' % line + + return result + + + @staticmethod + def print_issue_tree(tpl, issue, printer=None, width=None): + if width is None: + width = tpl.get_output_width() + # TODO: implement + return u'N/A' + + + @staticmethod + def format_issue_tree_item(tpl, issue, width=None): + if width is None: + width = tpl.get_output_width() + # TODO: implement + return u'N/A' + + + @staticmethod + def format_server_list_item(tpl, srv_name, srv_def, is_default): + flags = [] + if srv_def.get('password'): + flags.append(u'pwd') + if srv_def.get('user'): + flags.append(u'usr') + if srv_def.get('oauth.cert'): + flags.append(u'crt') + if srv_def.get('oauth.token'): + flags.append(u'tok') + if srv_def.get('oauth.secret'): + flags.append(u'sec') + + flags = u', '.join(flags) + + url = srv_def.get('url', 'N/A') + + return u'%s %s [%s]: %s' % (u'*' if is_default else u' ', + srv_name, flags, url) + + + @staticmethod + def format_server(tpl, srv_name, srv_def, is_default): + result = u'Server%s: %s\n' % ( + u' (default)' if is_default else u'', + srv_name) + for name, value in srv_def.itertree(): + if isinstance(value, basestring) \ + and value.find('\n') != -1: + lines = value.splitlines() + max_idx = len(lines) - 1 + first = True + for 
idx, line in enumerate(lines): + if first: + result += (u'%s = \\\n' % name) + first = False + # fall through + result += (u' \'%s\'%s\n' % ( + line, ('\\' if idx != max_idx else ''))) + else: + result += (u'%s = %s\n' % (name, repr(value))) + return result + + + @staticmethod + def get_field_list_for_issue_creation(tpl, parent_issue, + issue_type, project_key): + return ( + 'Key', 'Summary', 'Issue Type', 'Priority', 'Component', + 'Fix Version/s', 'Labels', 'Assignee', 'Reporter', + 'Description' + ) + + + @staticmethod + def get_field_list_for_issue_editing(tpl, issue): + return ( + 'Key', 'Summary', 'Issue Type', 'Priority', 'Component', + 'Fix Version/s', 'Labels', 'Assignee', 'Reporter', + 'Description' + ) + + @staticmethod + def get_field_list_to_force_jicML_markers(tpl): + return ( 'Description', ) + + @staticmethod + def convert_issue_fields_for_editing(tpl, issue, field_meta, + fields=None): + if not fields: + fields = \ + tpl.get_field_list_for_issue_editing(tpl, issue) + #included_fields = set(fields) if fields else None + result = OrderedDict() + fname = field_meta.get_field_name('key', 'Key') + result[fname] = issue.key + #processed = set((fname,)) + + issue_fields = issue.fields.__dict__ + + if fields: + for name in fields: + fid = field_meta.get_field_id(name, name) + fname = field_meta.get_field_name(name, name) + #if fname in processed: + if fname in result: + continue + #processed.add(fname) + value = issue_fields.get(fid) + if not value and fid not in issue_fields: + raise RuntimeError( + u'Field \'%s\' is not known' % name) + value = field_meta.convert_field_value_for_editing( + fid, value) + if value is not None: + result[fname] = value + else: + for fid, value in issue_fields.iteritems(): + if fid[0] == '_': + continue + fname = field_meta.get_field_name(fid, fid) + #if fname in processed: + if fname in result: + continue + #processed.add(fname) + value = field_meta.convert_field_value_for_editing( + fid, value) + if value is not None: + 
result[fname] = value + + return result + + + @staticmethod + def generate_issue_fields_for_creation(tpl, issue, parent_issue, + issue_type, field_meta, fields=None): + if not fields: + fields = \ + tpl.get_field_list_for_issue_creation(tpl, issue) + included_fields = set(fields) if fields else None + result = OrderedDict() + fname = field_meta.get_field_name('key', 'Key') + result[fname] = issue.key + #processed = set((fname,)) + + issue_fields = issue.fields + + if fields: + for name in fields: + fid = field_meta.get_field_id(name, name) + fname = field_meta.get_field_name(name, name) + #if fname in processed: + if fname in result: + continue + #processed.add(fname) + value = issue_fields.get(fid) + if not value and fid not in issue_fields: + raise RuntimeError( + u'Field \'%s\' is not known' % name) + value = field_meta.convert_field_value_for_editing( + fid, value) + if value is not None: + result[fname] = value + else: + for fid, value in issue_fields.iteritems(): + if fid[0] == '_': + continue + fname = field_meta.get_field_name(fid, fid) + #if fname in processed: + if fname in result: + continue + #processed.add(fname) + value = field_meta.convert_field_value_for_editing( + fid, value) + if value is not None: + result[fname] = value + + return result + + + + @staticmethod + def convert_issue_fields_after_editing(tpl, fields, field_meta): + result = OrderedDict() + + for name, value in fields.iteritems(): + fid = field_meta.get_field_id(name) + fvalue = field_meta.convert_field_value_after_editing( + fid, value) + result[name] = fvalue + return result + + + @staticmethod + def format_issue_field_meta(tpl, field_meta): + val_type = Util.get_nested_value( + field_meta, 'schema.type', u'N/A') + if val_type == 'array': + val_type += u':%s' % Util.get_nested_value( + field_meta, 'schema.items', u'N/A') + return u'- %s (%s): %s' % ( + field_meta.get('name'), + field_meta.get('id'), + val_type) + + + @staticmethod + def get_new_issue_field_values(tpl, parent_issue, 
issue_type, + project): + project_name = \ + tpl.cache.create_meta.get_project_name(project) + values = { + 'assignee': parent_issue.fields.assignee, + 'priority': parent_issue.fields.priority, + 'project': project_name, + 'link [Implements]': parent_issue.key, + } + if issue_type == 'Sub-task': + values['parent'] = parent_issue.key + + return values + + # class Default + +# class Template + + +class jicML (object): + + MARKER_START = u'{{{' + MARKER_END = u'}}}' + + @staticmethod + def parse_nvpair(text_iter): + """Parses a single field and its value, returns those as a + tuple""" + + parsing_multiline = False + folding_ws = False + name = None + value = u'' + line = u'' + while True: + try: + line = text_iter.next() + except StopIteration: + line = None + + if parsing_multiline: + if line is None \ + or line.lstrip().startswith(jicML.MARKER_END): + if value and value[-1] == u'\n': + value = value[:-1] + return (name, value) + + pos = line.find(jicML.MARKER_END) + if pos < 0: + value += line + u'\n' + continue + + remainder = line[:pos] + if remainder: + value += remainder + else: + if value[-1] == u'\n': + value = value[:-1] + parsing_multiline = False + return (name, value) + + elif folding_ws: + if line is None \ + or not line.strip() \ + or line[0] not in u' \t': + if line is not None: + text_iter.previous() + value = value.replace(u'\\n', '\n') + return (name, value) + value += u' ' + line.strip() + continue + + if line is None: + return (None, None) + + sline = line.strip() + if not sline \ + or sline.startswith(u'#'): + continue + + if name is None and value == u'': + pos = line.find(jicML.MARKER_START) + if pos < 0: + lhs, delimiter, rhs = line.partition(u':') + if delimiter: + name = lhs.strip() + else: + rhs = lhs + else: + if line[:pos].find(u':') != -1: + lhs, delimiter, rhs = line.partition(u':') + if delimiter: + name = lhs.strip() + else: + rhs = lhs + else: + rhs = line + + rhs = rhs.lstrip() + + if rhs.startswith(jicML.MARKER_START): + value = 
rhs[len(jicML.MARKER_START):] + if not value.strip(): + value = u'' + parsing_multiline = True + continue + pos = value.find(jicML.MARKER_END) + if pos < 0: + parsing_multiline = True + continue + value = value[:pos] + return (name, value) + else: + value = rhs.strip() + folding_ws = True + continue + + return (None, None) # never reached + + + @staticmethod + def parse_values(jicml): + """Parse a stream of jicML values + + Parameters: + jicml - jicML formatted set of values + + Returns: + array - parsed values + """ + # TODO: implement + return None + + + @staticmethod + def parse_nvpairs(jicml): + """Parse a stream of jicML name:value pairs + + Parameters: + jicml - jicML formatted set of name:value pairs + + Returns: + dict - parsed name:value pairs + """ + # TODO: implement + return None + + + @staticmethod + def emit_value(value, field_name=None, width=0, + force_markers=False): + """Generates a jicML representation of a value + + Parameters: + value - a value to represent in jicML + field_name - a name of the field this value is related + to + width - text width (-1 means use terminal's one, 0 + means no wrapping) + force_markers - wrap value into multiline markers + regardless its nature (multiline vs + singleline) + + Returns: + string - generated jicML representation + """ + + if not value: + return u'{{{\n}}}' if force_markers else u'' + + if width == -1: + width = os.environ.get('COLUMNS', 0) + + if not isinstance(value, basestring): + t = type(value) + + if t == datetime.date: + value = value.strftime('%Y-%m-%d') + + elif t == datetime.datetime: + value = value.strftime('%Y-%m-%d %H:%M:%S') + + elif t in (tuple, list): + value = u', '.join(value) + + if force_markers or value.find('\n') != -1: + return \ + ((field_name + u': ') if field_name else u'') + \ + u'%s\n' % jicML.MARKER_START + value + \ + u'\n%s' % jicML.MARKER_END + + else: + text = \ + ((field_name + u': ') if field_name else u'') \ + + value + if width: + return u'\n'.join( + 
textwrap.wrap(text, width, + break_long_words=True, + subsequent_indent=u' ' * 4)) + else: + return text + + + @staticmethod + def emit_values(values, width=0, force_markers=False): + """Generates a jicML representation of a set of an values + + Parameters: + values - an enumerable container with values + width - text width (-1 means use terminal's one, 0 + means no wrapping) + force_markers - wrap value into multiline markers + regardless its nature (multiline vs + singleline) + + Returns: + string - generated jicML representation + """ + + if not values: + return '' + + result = u'' + + for value in values: + if type(value) in (list, tuple): + field_name, field_value = value[:2] + else: + field_name = None + field_value = value + result += Util.emit_value(field_value, field_name, width, + force_markers) + return result + + + @staticmethod + def parse_issues(text_iter): + issues = OrderedDict() + common_fields = [] + + collecting_common_fields = True + current_issue = None + while True: + name, value = jicML.parse_nvpair(text_iter) + if not name: + break + lname = name.lower() + + if collecting_common_fields: + if lname != 'key': + common_fields.append((name, value)) + continue + else: + collecting_common_fields = False + # fall through + + if lname == 'key': + current_issue = OrderedDict() + current_issue[name] = value + current_issue.update(common_fields) + issues[value] = current_issue + else: + current_issue[name] = value + + return issues + +# class jicML + + +class FieldMetadata(object): + + def __init__(self, field_meta=None): + self.field_meta = field_meta + self.field_meta.append( + { u'id': u'parent', + u'name': u'Parent Issue', + u'schema': { u'type': u'string', + u'system': u'parent' + } + }) + # index fields + self.field_by_id = {} + self.field_by_name = {} + for field_def in field_meta: + self.field_by_id[field_def.get('id')] = field_def + self.field_by_name[field_def.get('name')] = field_def + + + def get_field_id(self, partial_name, default=None): 
+ result = self.field_by_id.get(partial_name) + if result: + return result['id'] + + result = self.field_by_name.get(partial_name) + if result: + return result['id'] + + matches = [] + for name in self.field_by_id.keys(): + if name.find(partial_name) != -1: + matches.append(self.field_by_id[name]['id']) + if len(matches) > 1: + raise RuntimeError( + u'Partially specified field name \'%s\' is ambigous' %\ + partial_name) + if matches: + return matches[0] + + matches = [] + for name in self.field_by_name.keys(): + if name.find(partial_name) != -1: + matches.append(self.field_by_name[name]['id']) + if len(matches) > 1: + raise RuntimeError( + u'Partially specified field name \'%s\' is ambigous' %\ + partial_name) + if matches: + return matches[0] + + return default + + + def get_field_name(self, partial_name, default): + result = self.field_by_id.get(partial_name) + if result: + return result['name'] + + result = self.field_by_name.get(partial_name) + if result: + return result['name'] + + matches = [] + for name in self.field_by_id.keys(): + if name.find(partial_name) != -1: + matches.append(self.field_by_id[name]['name']) + if len(matches) > 1: + raise RuntimeError( + u'Partially specified field name \'%s\' is ambigous' %\ + partial_name) + if matches: + return matches[0] + + matches = [] + for name in self.field_by_name.keys(): + if name.find(partial_name) != -1: + matches.append(self.field_by_name[name]['name']) + if len(matches) > 1: + raise RuntimeError( + u'Partially specified field name \'%s\' is ambigous' %\ + partial_name) + if matches: + return matches[0] + + return default + + + def convert_field_value_for_editing(self, field_name, field_value): + if field_value is None: + return u'' + + field_id = self.get_field_id(field_name) + val_type = self.get_field_type(field_id) + + return self._convert_field_value_for_editing( + field_id, val_type, field_value) + + + def _convert_field_value_for_editing(self, fname, ftype, fvalue): + + if isinstance(fvalue, 
basestring) \ + and ftype[0] not in ('date', 'datetime'): + return fvalue + + if ftype[0] in ('string',): + if hasattr(fvalue, 'value'): + return fvalue.value + return unicode(fvalue) + + elif ftype[0] == 'date': + date = datetime.date.strptime(fvalue, '%Y-%m-%d') + #return dt.strftime('%Y-%m-%d') + return dt + + elif ftype[0] == 'datetime': + dt = datetime.datetime.strptime( + fvalue[:19], '%Y-%m-%dT%H:%M:%S') + #return dt.strftime('%Y-%m-%d %H:%M:%S') + return dt + + elif ftype[0] in ('priority', 'project', 'status', 'resolution', + 'version', 'component', 'securitylevel', + 'issuetype'): + return Util.get_nested_value(fvalue, 'name') + + elif ftype[0] == 'number': + return fvalue + + elif ftype[0] == 'user': + #name = Util.get_nested_value(fvalue, 'name') + #disp_name = Util.get_nested_value(fvalue, 'displayName') + #email = Util.get_nested_value(fvalue, 'emailAddress') + + #result = u'' + #if disp_name: + #result = '%s (%s) <%s>' % (disp_name, name, email) + #else: + #result = email + + result = Util.get_nested_value(fvalue, 'name') + + if not result: + result = fvalue + + return result + + elif ftype[0] == 'progress': + #return u'%d/%d' % (fvalue.progress, fvalue.total) + return (fvalue.progress, fvalue.total) + + elif ftype[0] == 'array': + if ftype[1] in ('comment', 'issuelinks', 'votes', 'watches', + 'worklog'): + return None + result = [] + for subvalue in fvalue: + result.append(self._convert_field_value_for_editing( + fname, ftype[1:], subvalue)) + #return u', '.join(result) + return result + + return unicode(fvalue) + + + def convert_field_value_after_editing(self, field_id, field_value): + if field_value is None: + return u'' + + ftype = self.get_field_type(field_id) + + if ftype[0] in ('string', 'issuetype', 'priority', 'project', + 'status', 'resolution', 'version', + 'component', 'securitylevel', 'user'): + return field_value.strip() + + elif ftype[0] == 'date': + if not field_value: + return u'' + return datetime.date.strptime(field_value, 
'%Y-%m-%d') + + elif ftype[0] == 'datetime': + if not field_value: + return u'' + return datetime.datetime.strptime(field_value, + '%Y-%m-%d %H:%M:%S') + + elif ftype[0] == 'number': + return long(field_value) + + elif ftype[0] == 'progress': + progress, delim, total = field_value.partition('/') + if delim: + try: + return (long(progress), long(total)) + except ValueError: + return u'' + + elif ftype[0] == 'array': + if field_id == 'comment': + return field_value + + if not field_value: + return u'' + + values = [item.strip() for item in field_value.split(',')] + if ftype[1] == 'number': + try: + values = [long(item) for item in values] + except ValueError: + pass + return values + + return field_value + + + def get_field_type(self, field_id): + meta = self.field_by_id.get(field_id) + if not meta: + return 'string' + val_type = [Util.get_nested_value(meta, 'schema.type'), ] + if val_type[0] == 'array': + val_type.append(Util.get_nested_value(meta, 'schema.items')) + return val_type + + + def create_issue_mockup(self, issue_key, issue_type, field_values=None): + """Creates a mockup of a JIRA issue (data only) + + Parameters: + issue_key : string + key of the issue to create mockup for + issue_type : string + name of the issue type + field_values : iterable + list of (name, value) pairs providing values for + corresponding fields + Returns: + a data-only issue mockup (implemented using Namespace class) + """ + issue_mockup = Namespace() + for field_def in self.field_meta: + field_name = field_def['name'] + field_name = self.get_field_id(field_name) + value = None + if field_values and field_name in field_values: + value = field_values.get(field_name) + issue_mockup.set('fields.%s' % field_name, value) + + issue_mockup.key = issue_key + issue_mockup.issuekey = issue_key + issue_mockup.fields.issuetype = issue_type + + issue_mockup.learn(False) + + return issue_mockup + + +# class FieldMetadata + + +class CreateIssueMetadata(object): + + def __init__(self, 
create_meta=None): + self.create_meta = create_meta + # index projects, issue types and fields + self.project_by_key = {} + self.project_by_name = {} + for project_def in create_meta.get('projects'): + project_issue_types = {} + for issue_type_def in project_def.get('issuetypes', ()): + issue_type_fields = issue_type_def['fields'] + issue_type_info = (issue_type_def, issue_type_fields) + project_issue_types[issue_type_def['name']] = \ + issue_type_info + project_info = (project_def, project_issue_types) + self.project_by_key[project_def.get('key')] = project_info + self.project_by_name[project_def.get('name')] = project_info + + + def get_project_key(self, project): + project_def = self.project_by_key.get(project) + if not project_def: + project_def = self.project_by_name.get(project, (None,None)) + return project_def[0]['key'] + + + def get_project_name(self, project): + project_def = self.project_by_key.get(project) + if not project_def: + project_def = self.project_by_name.get(project, (None,None)) + return project_def[0]['name'] + + + def get_project_def(self, project): + project_def = self.project_by_key.get(project) + if not project_def: + project_def = self.project_by_name.get(project, (None,None)) + return project_def + + + def get_project_issue_type(self, project, issue_type): + project_def, issue_types = self.get_project_def(project) + if not project_def: + raise RuntimeError(u'Unknown project: \'%s\'' % project) + pair = issue_types.get(issue_type) + if not pair: + raise RuntimeError( + u'Unknown issue type \'%s\' for project \'%s\'' % ( + issue_type, project)) + issue_type_def, field_defs = pair + return issue_type_def + + + def get_project_issue_type_fields(self, project, issue_type): + project_def, issue_types = self.get_project_def(project) + if not project_def: + raise RuntimeError(u'Unknown project: \'%s\'' % project) + pair = issue_types.get(issue_type) + if not pair: + raise RuntimeError( + u'Unknown issue type \'%s\' for project \'%s\'' % ( + 
issue_type, project)) + issue_type_def, field_defs = pair + return field_defs + + +# class CreateIssueMetadata + + +class EditingContext(object): + + def __init__(self, cfg, cache, tpl): + self.cfg = cfg + self.cache = cache + self.field_meta = cache.fields + self.create_meta = cache.create_meta + self.tpl = tpl + self.original = OrderedDict() + self.edited = OrderedDict() + + + def prepare_issues_for_editing(self, issues): + jicml = u'' + first = True + prepared = OrderedDict() + forced_markers = \ + self.tpl.get_field_list_to_force_jicML_markers(self.tpl) + forced_markers = set([ + self.field_meta.get_field_name(item, item) \ + for item in forced_markers ]) + + for issue in issues: + if first: + first = False + else: + jicml += u'\n\n' + + fields = \ + self.tpl.convert_issue_fields_for_editing( + self.tpl, issue, self.cache.fields, + self.tpl.fields) + prepared[issue.key] = fields + for fname, value in fields.iteritems(): + if hasattr(issue, 'editmeta') \ + and issue.editmeta is not None: + editmeta = issue.editmeta.get('fields', None) + if editmeta: + fid = self.field_meta.get_field_id(fname, fname) + field_edit_meta = editmeta.get(fid, ()) + if field_edit_meta: + allowed_values = [] + for item in field_edit_meta.get('allowedValues', ()): + iname = item.get('name') + if iname: + allowed_values.append(iname) + if allowed_values: + text = u'Allowed values for '\ + u'\'%s\': %s\n' % ( + fname, u', '.join(allowed_values)) + lines = textwrap.wrap( + text, 72, + break_long_words=True, + initial_indent=u'# ', + subsequent_indent=u'# ') + jicml += u'\n'.join(lines) + u'\n' + jicml += u'%s: %s\n' % (fname, + jicML.emit_value( + value, + force_markers=(fname in forced_markers))) + + # TODO: allow adding a new comment within the same change set + #jicml += u'Comment: {{{\n}}}\n' + + self.original = prepared + + return jicml + + + def create_issue_mockup(self, issue_key, parent_issue, issue_type, + project): + """Creates a data-only mockup for an issue using information + 
provided""" + fields_with_values = self.tpl.get_new_issue_field_values( + self.tpl, parent_issue, issue_type, project) + mockup = self.field_meta.create_issue_mockup( + issue_key, issue_type, + fields_with_values) + return mockup + + + def prepare_new_issue_for_editing(self, issue, fields_to_show, + fields_meta=None): + jicml = u'' + forced_markers = \ + self.tpl.get_field_list_to_force_jicML_markers(self.tpl) + forced_markers = set([ + self.field_meta.get_field_name(item, item) \ + for item in forced_markers ]) + + for name in fields_to_show: + field_name = self.field_meta.get_field_name(name, name) + field_id = self.field_meta.get_field_id(name, name) + value = Util.get_issue_field_value(issue, field_id) + value = self.field_meta.convert_field_value_for_editing( + field_id, value) + + if value is None: + value = u'' + + if fields_meta and field_id in fields_meta: + allowed_values = [] + for item in fields_meta[field_id].get('allowedValues', ()): + iname = item.get('name') + if iname: + allowed_values.append(iname) + if allowed_values: + text = u'Allowed values for '\ + u'\'%s\': %s\n' % ( + field_name, u', '.join(allowed_values)) + lines = textwrap.wrap( + text, 72, + break_long_words=True, + initial_indent=u'# ', + subsequent_indent=u'# ') + jicml += u'\n'.join(lines) + u'\n' + + jicml += u'%s: %s\n' % (field_name, + jicML.emit_value( + value, + force_markers=(field_name in forced_markers))) + + return jicml + + + def prepare_issues_for_creating(self, issues): + jicml = u'' + first = True + prepared = OrderedDict() + for issue in issues: + if first: + first = False + else: + jicml += u'\n\n' + + fields = \ + self.tpl.generate_issue_fields_for_creation( + self.tpl, issue, self.cache.fields, + self.tpl.fields) + prepared[issue.key] = fields + for fname, value in fields.iteritems(): + if hasattr(issue, 'editmeta') \ + and issue.editmeta is not None: + editmeta = issue.editmeta.get('fields', None) + if editmeta: + fid = self.field_meta.get_field_id(fname, fname) + 
field_edit_meta = editmeta.get(fid, ()) + if field_edit_meta: + allowed_values = [] + for item in field_edit_meta.get('allowedValues', ()): + iname = item.get('name') + if iname: + allowed_values.append(iname) + if allowed_values: + text = u'Allowed values: %s\n' % (u', '.join(allowed_values)) + lines = textwrap.wrap( + text, 72, + break_long_words=True, + initial_indent=u'# ', + subsequent_indent=u'# ') + jicml += u'\n'.join(lines) + u'\n' + jicml += u'%s: %s\n' % (fname, jicML.emit_value(value)) + + # allow adding a new comment within the same change set + #jicml += u'Comment: {{{\n}}}\n' + + self.original = prepared + + return jicml + + + def parse_edited_issues(self, jicml_text): + issues = jicML.parse_issues(TextIterator(jicml_text)) + result = OrderedDict + for key in issues: + fields = issues.get(key) + fields = self.tpl.convert_issue_fields_after_editing( + self.tpl, fields, self.cache.fields) + issues[key] = fields + self.edited = issues + return issues + + + def generate_create_job(self): + edited = self.edited + if not edited: + raise RuntimeError( + u'Internal Error: no edited values for EditingContext') + + field_meta = self.cache.fields + + job = {} + current_issue = {} + + # go through the issues added in editor + for issue_key, fields in edited.iteritems(): + if not issue_key.startswith('NEW-'): + continue + + fields_to_set = {} + for fname, value in fields.iteritems(): + fid = field_meta.get_field_id(fname) + + if fid in ('issuekey', 'key'): + continue + + if value: + self._add_field_update_job( + fields_to_set, fid, None, value) + if fields_to_set: + job[issue_key] = fields_to_set + + return job + + + def generate_update_job(self): + original = self.original + #if not original: + #raise RuntimeError( + #u'Internal Error: no original values for EditingContext') + edited = self.edited + if not edited: + raise RuntimeError( + u'Internal Error: no edited values for EditingContext') + + field_meta = self.cache.fields + + job = {} + current_issue = {} + 
eissues_processed = set() + + # go through the originally listed issues + for issue_key, ofields in original.iteritems(): + if issue_key not in edited: + continue # removed by user - skip + if issue_key.startswith('NEW-'): + continue + eissues_processed.add(issue_key) + efields = edited.get(issue_key) + efields_processed = set(('key', 'issuekey')) + + # go through the original fields + for fname, ovalue in ofields.iteritems(): + fid = field_meta.get_field_id(fname) + if fid in ('issuekey', 'key'): + continue + + efields_processed.add(fname) + + evalue = efields.get(fname) + + if fname not in efields \ + or ovalue == evalue \ + or (not ovalue and not evalue): + continue + + if issue_key not in job: + current_issue = {} + job[issue_key] = current_issue + + self._add_field_update_job( + current_issue, fid, ovalue, efields.get(fname)) + + # now - through the newly added ones + for fname, evalue in efields.iteritems(): + if fname in efields_processed: + continue + + fid = field_meta.get_field_id(fname) + + if fid in ('issuekey', 'key'): + continue + + if issue_key not in job: + current_issue = {} + job[issue_key] = current_issue + + self._add_field_update_job( + current_issue, fid, None, evalue) + + # now go through the issues added in editor + for issue_key, efields in edited.iteritems(): + if issue_key in eissues_processed: + continue + if issue_key.startswith('NEW-'): + continue + + for fname, evalue in efields.iteritems(): + fid = field_meta.get_field_id(fname) + + if fid in ('issuekey', 'key'): + continue + + if issue_key not in job: + current_issue = {} + job[issue_key] = current_issue + + self._add_field_update_job( + current_issue, fid, None, evalue) + + return job + + + def _add_field_update_job(self, issue_job, fid, ovalue, evalue): + ftype = self.field_meta.get_field_type(fid) + if ftype[0] == 'array': + if fid == 'labels' and not evalue: + evalue = [] + if fid not in ('labels',): + evalue = [{ 'name': item} for item in evalue] + elif ftype[0] in ('priority', 
'user', 'issuetype'): + evalue = { 'name': evalue } + elif ftype[0] in ('project',): + project_key = self.create_meta.get_project_key(evalue) + evalue = { 'key': project_key } + + if fid == 'parent': + evalue = { 'key': evalue } + + issue_job[fid] = evalue + return issue_job + +# class EditingContext + + +################################################################ +### Commands ################################################# +################################################################ + + +GET_COMMENTS_PROMPT = """\ +# Please enter one or more comments to be added for the following +# issues (in jicml value format - please see `man jicml`): +# %s +# +# If you provide less comments than the number issues listed, the last +# comment provided will be used for all the issues that are lacking +# their individually provided comments (this can be used to add the same +# comment into multiple issues - just provide one comment in this case). +# +# Leave the file intact or delete all its content to cancel the +# operation. + +""" + +def cmd_comments_add(cfg): + cache = Cache(cfg) + tpl = Template(cfg, cache) + + if cache.mode == Cache.MODE_OFFLINE: + raise RuntimeError( + u'Can\'t add comments in offline mode yet') + + listed_issues = \ + Util.unwrap_list_of_lists(cfg.o.get('query.args',())) + + if not listed_issues: + vpre(VERBOSITY_ERRORS, u'ERROR: Please specify issue '\ + u'id(s) to add comment(s) to') + return 1 + + job = [] + job_keys = set() + for key in listed_issues: + if not Util.is_issue_key(key): + vpre(VERBOSITY_WARNINGS, u'WARNING: Invalid issue key '\ + u'\'%s\' - skipped.' 
% key) + continue + if key not in job_keys: + job.append(key) + job_keys.add(key) + + if not job: + vpre(VERBOSITY_WARNINGS, u'WARNING: Nothing to do.') + return 1 + + issues = cache.get(job) + if not issues: + vpre(VERBOSITY_ERRORS, u'ERROR: Unable to retrieve issues: '\ + u'%s' % (u', '.join(job))) + return 1 + + # editor based + if cfg.o.get('cl.use_editor', False): + text_for_editing = GET_COMMENTS_PROMPT % (u', '.join(job)) + editor = cfg.o.get('editor', 'sensible-editor') + vpre(VERBOSITY_INFO, u'Invoking %s to edit get comment(s)' %\ + editor) + comment_bodies = Util.get_from_editor(cfg, editor, + text_for_editing, 'text') + if not comment_bodies \ + or comment_bodies == text_for_editing: + vpre(VERBOSITY_INFO, u'Cancelled') + return 1 + + strin = TextIterator(comment_bodies) + + _, comment_body = jicML.parse_nvpair(strin) + if comment_body is None or not comment_body.strip(): + vpre(VERBOSITY_WARNINGS, u'WARNING: Not adding an empty comment') + return 1 + + for issue in issues: + try: + cache.add_comment(issue, comment_body) + except Exception, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: Unable to add new comment for '\ + u'issue \'%s\': %s' % (issue.key, str(e))) + + # if there are no more comments - use the last one + _, next_comment = jicML.parse_nvpair(strin) + if next_comment is not None: + comment_body = next_comment + + # stdin based + else: + if sys.stdin.isatty(): + raise RuntimeError( + u'Please pipe comment body in or use -e switch.') + stdin = FileTextIterator(sys.stdin) + _, comment_body = jicML.parse_nvpair(stdin) + if comment_body is None or not comment_body.strip(): + vpre(VERBOSITY_ERRORS, u'ERROR: Not adding an empty comment') + return 1 + + for issue in issues: + try: + cache.add_comment(issue, comment_body) + except Exception, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: Unable to add new comment for issue '\ + u'\'%s\': %s' % (issue.key, str(e))) + + # if there are no more comments - use the last one + _, next_comment = jicML.parse_nvpair(stdin) + 
if next_comment is not None: + comment_body = next_comment + + return 0 + + +def cmd_comments_delete(cfg): + cache = Cache(cfg) + tpl = Template(cfg, cache) + + if cache.mode == Cache.MODE_OFFLINE: + raise RuntimeError( + u'Can\'t delete comments in offline mode yet') + + if cfg.o.get('cl.use_editor', False): + raise RuntimeError( + u'Comment deletion using an editor is not implemented yet.') + + listed_comments = \ + Util.unwrap_list_of_lists(cfg.o.get('query.args',())) + + job = Util.parse_comment_ids(listed_comments) + + if not job: + if sys.stdin.isatty(): + raise RuntimeError( + u'Please pipe the key:comment pairs in or use -e switch.') + else: + vpre(VERBOSITY_WARNINGS, u'WARNING: Nothing to do') + + pre(u'The following comments are going to be deleted:\n') + + to_delete = [] + to_delete_ids = set() + for issue_key, comment_ids in job: + if not comment_ids: + vpre(VERBOSITY_WARNINGS, + u'WARNING: Will not delete all comments '\ + u'for issue \'%s\'' % issue_key) + continue + + issue = cache.get(issue_key) + if not issue: + vpre(VERBOSITY_WARNINGS, + 'WARNING: issue \'%s\' not found - skipped' % issue_key) + continue + issue = issue[0] + + for comment in issue.fields.comment.comments: + if not comment_ids or comment.id in comment_ids: + to_delete.append((issue, comment)) + key = u'%s:%s' % (issue.key, comment.id) + if key not in to_delete_ids: + pre(tpl.format_issue_comments_item(tpl, issue, + comment, show_issue_key=True)) + to_delete_ids.add(key) + + pre() + + if not to_delete: + vpre(VERBOSITY_WARNINGS, u'WARNING: Nothing to do.') + return 0 + + if Util.confirm(u'Are you sure you want to delete them? 
(type YES)', + strict=True): + for issue, comment in to_delete: + try: + comment.delete() + except Exception, e: + vpre(VERBOSITY_ERRORS, + u'Unable to delete comment %s:%s: %s' % ( + issue.key, comment.id, str(e))) + cache.fetch([issue.key for issue, _ in to_delete]) + else: + vpre(VERBOSITY_WARNINGS, u'Not deleting.') + + return 0 + + +EDIT_COMMENTS_PROMPT = """\ +# Please edit the comment%s listed below. You may also add additional +# comments you would like to edit with their respective new text, or +# remove ones you would not like to edit. +# +# Leave the file intact or delete all its content to cancel the +# operation. + +""" + +def cmd_comments_edit(cfg): + cache = Cache(cfg) + tpl = Template(cfg, cache) + + if cache.mode == Cache.MODE_OFFLINE: + raise RuntimeError( + u'Can\'t edit comments in offline mode yet') + + listed_comments = \ + Util.unwrap_list_of_lists(cfg.o.get('query.args',())) + + job = Util.parse_comment_ids(listed_comments) + + vpre(VERBOSITY_INFO, + u'The following comments are going to be edited:\n') + + to_edit = [] + to_edit_ids = set() + for issue_key, comment_ids in job: + if not comment_ids: + vpre(VERBOSITY_WARNINGS, + u'WARNING: Will not edit all comments '\ + u'for issue \'%s\'' % issue_key) + continue + + issue = cache.get(issue_key) + if not issue: + vpre(VERBOSITY_WARNINGS, + u'WARNING: issue \'%s\' not found - skipped' % \ + issue_key) + continue + issue = issue[0] + + for comment in issue.fields.comment.comments: + if not comment_ids or comment.id in comment_ids: + to_edit.append((issue, comment)) + key = u'%s:%s' % (issue.key, comment.id) + if key not in to_edit_ids: + vpre(VERBOSITY_INFO, + tpl.format_issue_comments_item(tpl, issue, + comment, show_issue_key=True)) + to_edit_ids.add(key) + + vpre(VERBOSITY_INFO, u'') + + if not to_edit: + vpre(VERBOSITY_WARNINGS, u'WARNING: Nothing to do.') + return 0 + + # editor based + if cfg.o.get('cl.use_editor', False): + raise RuntimeError( + u'Comment editing using an editor is not 
implemented yet.') + # generate textual representation of comments + # invoke editor + # parse edited file + # show changes and get confirmation + # apply changes if any + + # stdin based + else: + if sys.stdin.isatty(): + raise RuntimeError( + u'Please pipe the new comment bodies in or use -e switch') + + stdin = FileTextIterator(sys.stdin) + _, comment_body = jicML.parse_nvpair(stdin) + if comment_body is None or not comment_body.strip(): + raise RuntimeError( + u'ERROR: Not replacing with an empty comment') + + for issue, comment in to_edit: + try: + # Workaround replacing the line below (not yet + # implemented in jira-python) + # comment.update(stripped) + data = { 'body': comment_body } + super(Comment, comment).update(**data) + except Exception, e: + vpre(VERBOSITY_ERRORS, + u'ERROR: Unable to edit comment %s for issue '\ + u'\'%s\': %s' % (comment.id, issue.key, str(e))) + + # if there are no more comments - use the last one + _, next_comment = jicML.parse_nvpair(stdin) + if next_comment is not None: + comment_body = next_comment + + vpre(VERBOSITY_INFO, + u'Fetching %d updated issues...' 
% len(to_edit), + end=u'') + cache.fetch([issue.key for issue, _ in to_edit]) + vpre(VERBOSITY_INFO, u' - done!') + + return 0 + + +def cmd_comments_list(cfg): + listed_issues = \ + Util.unwrap_list_of_lists(cfg.o.get('query.args',())) + + cache = Cache(cfg) + tpl = Template(cfg, cache) + + issues = cache.get( + listed_issues, + cfg.o.get('query.down', False), + cfg.o.get('query.up', False), + cfg.o.get('query.down_from'), + cfg.o.get('query.up_from'), + cfg.o.get('query.include_self', False), + cfg.o.get('query.link_types'), + cfg.o.get('query.depth'), + cfg.o.get('display.order_by'), + cfg.o.get('query.number_of_items'), + cfg.o.get('query.filter'), + cfg.o.get('query.jql')) + + first = True + for issue in issues: + if first: + first = False + else: + pr() + pr(tpl.format_issue_header(tpl, issue), end='') + tpl.print_issue_comments(tpl, issue) + + return 0 + + +def cmd_comments_reply(cfg): + raise RuntimeError(u'Not implemented yet.') + pre(u'WIP:cmd_comments_reply') + return 0 + + +def cmd_comments_show(cfg): + listed_comments = \ + Util.unwrap_list_of_lists(cfg.o.get('query.args',())) + + job = Util.parse_comment_ids(listed_comments) + + cache = Cache(cfg) + tpl = Template(cfg, cache) + + for issue_key, comment_ids in job: + issue = cache.get(issue_key) + if not issue: + vpre(VERBOSITY_WARNINGS, + 'WARNING: issue \'%s\' not found - skipped' % issue_key) + continue + issue = issue[0] + for comment in issue.fields.comment.comments: + if not comment_ids or comment.id in comment_ids: + if cfg.o.get('display.raw', False): + #pr(jicML.emit_value( + #comment.body, force_markers=True)) + pr(jicML.emit_value(comment.body, + force_markers=False)) + else: + pr(tpl.format_issue_comments_item(tpl, issue, + comment, show_issue_key=True)) + return 0 + + +def cmd_commands_symlink(cfg): + commands = cfg.o.get('commands') + if not commands: + raise RuntimeError( + u'No porcelain commands defined') + + jic_file = Util.get_jic_file() + jic_location = Util.get_jic_location() + 
def cmd_configuration_edit(cfg):
    """Open the jic configuration file in the user's editor."""
    editor = cfg.o.get('editor', 'sensible-editor')
    Util.edit_file(editor, cfg.config_file)


# TODO: get from template
CREATE_ISSUES_PROMPT = """\
# Please edit the field values for the new issue%s listed below.
# You may also add additional issues you would like to create and/or
# fields you would like to provide values for with their respective new
# values, or remove ones you would not like to create.
#
# New issues should have 'NEW-' prefix in their keys.
#
# Leave the file intact or delete all its content to cancel the
# operation.

"""


def _format_field_value(value):
    """Normalize a raw field value into a display string.

    Lists/tuples are joined with commas; dicts are reduced to their
    'name' or 'key' member (or an arbitrary first value otherwise);
    falsy values become the empty string.  Non-empty values are passed
    through jicML.emit_value for quoting.
    """
    if not value:
        return u''
    if type(value) in (list, tuple):
        items = []
        for item in value:
            if type(item) == dict:
                if item:
                    items.append(item.get(item.keys()[0]))
            else:
                items.append(str(item))
        value = u', '.join(items)
    elif type(value) == dict:
        if 'name' in value:
            value = value.get('name', value)
        elif 'key' in value:
            value = value.get('key', value)
        else:
            value = value.get(value.keys()[0])
    if not value:
        value = u''
    return jicML.emit_value(value)


def _print_job_fields(cache, job):
    """Pretty-print a job's per-issue field values for user review.

    `job` maps issue keys to {field_id: value} dicts.
    """
    for issue in sorted(job.keys()):
        fields = job.get(issue)
        pre(u'* %s' % issue)
        for fid in sorted(fields.keys()):
            fname = cache.fields.get_field_name(fid, fid)
            pre(u' - %s: %s' % (fname, _format_field_value(fields.get(fid))))


def cmd_issues_create(cfg):
    """Create new issues (and optionally edit existing ones) via an editor.

    Positional query args that look like issue keys become parent
    issues (with --down) or issues to edit; everything else is treated
    as a project to create issues in.  Returns 0 on success/cancel,
    1 if the user declines to apply the pending changes.
    """
    cache = Cache(cfg)

    if cache.mode == Cache.MODE_OFFLINE:
        raise RuntimeError(
            u'Can\'t create issues in offline mode yet')

    arguments = \
        Util.unwrap_list_of_lists(cfg.o.get('query.args', ()))

    if not cfg.o.get('cl.use_editor', False) \
            or not sys.stdin.isatty() \
            or not sys.stdout.isatty():
        raise RuntimeError(
            u'Issue creation is only supported using an editor for now')

    parent_issues = []
    projects_to_create_issues_in = []
    issues_to_edit = []

    go_down = cfg.o.get('query.down', False)

    down_from = cfg.o.get('query.down_from', [])
    if down_from:
        parent_issues.extend(down_from)

    issue_types = cfg.o.get('query.issue_types')
    if not issue_types:
        raise RuntimeError(
            u'One or more issue types need to be specified '
            u'to create issues')

    # get a list of issue IDs to create issues with
    # validate issues listed
    for issue in arguments:
        if Util.is_issue_key(issue):
            if go_down:
                parent_issues.append(issue)
            else:
                issues_to_edit.append(issue)
        else:
            projects_to_create_issues_in.append(issue)

    if not projects_to_create_issues_in and not issues_to_edit:
        vpre(VERBOSITY_WARNINGS, u'WARNING: Nothing to do.')
        return 0

    tpl = Template(cfg, cache)
    edc = EditingContext(cfg, cache, tpl)

    # fetch existing ones (if any)
    if parent_issues:
        vpre(VERBOSITY_INFO,
             u'Fetching %d parent issues...' % len(parent_issues),
             end=u'')
        parent_issues = cache.fetch(parent_issues)

    if issues_to_edit:
        vpre(VERBOSITY_INFO,
             u'Fetching %d issues for editing...' % len(issues_to_edit),
             end=u'')
        issues_to_edit = cache.fetch(issues_to_edit)

    ##### the code below should be refactored and relocated into
    ##### corresponding classes

    issues_to_create = []
    issue_counters = {}
    for project in projects_to_create_issues_in:
        issue_counters[project] = 1

    # process all the issues being created
    # NOTE(review): nothing is created unless at least one parent issue
    # is present (-down/--down-from) -- confirm this is intentional
    first = True
    jicml_created = u''
    for parent_issue in parent_issues:
        for issue_type in issue_types:
            for project in projects_to_create_issues_in:
                # determine the fields for each new issue:
                # first - the always required ones...
                fields_to_show = \
                    [u'issuekey', u'summary', u'issuetype', u'project']
                fields_processed = set(fields_to_show)
                fields_processed.add(u'key')

                # ...then - required ones that are not listed by
                # the template...
                template_fields = tpl.get_field_list_for_issue_creation(
                    tpl, parent_issue, issue_type, project)
                template_fields = [edc.field_meta.get_field_id(name, name)
                                   for name in template_fields]
                all_fields = cache.create_meta.\
                    get_project_issue_type_fields(project, issue_type)
                required_fields = [
                    field_id
                    for field_id, field_def in all_fields.iteritems()
                    if field_def['required']]
                covered_by_template = set(fields_processed)
                covered_by_template.update(template_fields)

                for field_name in required_fields:
                    if field_name not in covered_by_template:
                        fields_to_show.append(field_name)
                        fields_processed.add(field_name)

                for field_name in covered_by_template:
                    if field_name not in fields_processed:
                        fields_to_show.append(field_name)
                        fields_processed.add(field_name)

                # ...and then the ones from template
                for field_name in template_fields:
                    if field_name not in fields_processed:
                        fields_to_show.append(field_name)
                        fields_processed.add(field_name)

                # aliases like NEW-PROJ-1, NEW-PROJ-2, ...
                issue_key = 'NEW-%s-%d' % (
                    project, issue_counters[project])
                issue_counters[project] += 1

                mockup = edc.create_issue_mockup(
                    issue_key, parent_issue, issue_type, project)
                mockup.set('fields.reporter', cache.user)

                issues_to_create.append((mockup, fields_to_show))

                if first:
                    first = False
                else:
                    jicml_created += u'\n\n'

                fields_meta = \
                    edc.create_meta.get_project_issue_type_fields(
                        project, issue_type)

                jicml_created += edc.prepare_new_issue_for_editing(
                    mockup, fields_to_show, fields_meta)

    jicml = CREATE_ISSUES_PROMPT % (
        u's' if len(issues_to_create) != 1 else u'')
    jicml += jicml_created
    if issues_to_edit:
        jicml += u'\n\n' + EDIT_ISSUES_PROMPT % (
            u's' if len(issues_to_edit) != 1 else u'')
        jicml += edc.prepare_issues_for_editing(issues_to_edit)

    # invoke an editor
    editor = cfg.o.get('editor', 'sensible-editor')
    edited_jicml = Util.get_from_editor(
        cfg, editor, jicml, file_suffix=u'jicml')

    if not edited_jicml:
        vpre(VERBOSITY_INFO, u'Cancelled by the user')
        return 0

    # parse issues (populates the editing context used by the
    # generate_*_job calls below)
    edc.parse_edited_issues(edited_jicml)

    # generate jobs
    create_job = edc.generate_create_job()
    update_job = edc.generate_update_job()

    # show the jobs and get the user's confirmation
    if create_job:
        pre(u'New issues to be created:')
        # BUGFIX: originally listed update_job here
        _print_job_fields(cache, create_job)

    if update_job:
        pre(u'\n\nPending changes to be applied:')
        _print_job_fields(cache, update_job)

    if not Util.confirm(u'\nWould you like to apply them? (y/n)'):
        return 1

    # execute create job
    created_issues = []
    if create_job:
        for issue_key, fields in create_job.iteritems():
            pr('%s: %s' % (issue_key, repr(fields)))
            vpre(VERBOSITY_INFO, u'Creating %s...' % issue_key, end=u'')
            issue = cache.create(fields)
            created_issues.append(issue.key)
            # BUGFIX: was pre(VERBOSITY_ERRORS, ...) -- pre() takes no
            # verbosity argument; vpre() matches every other call site
            vpre(VERBOSITY_ERRORS,
                 u'\nCreated new issue for alias %s: %s' %
                 (issue_key, issue.key))
            vpre(VERBOSITY_INFO, u'Done!')

    # execute the update job
    if update_job:
        for issue_key, fields in update_job.iteritems():
            vpre(VERBOSITY_INFO, u'Updating %s...' % issue_key, end=u'')
            issue = cache.get(issue_key)[0]
            issue.update(**fields)
            vpre(VERBOSITY_INFO, u' - done!')

    # refresh the local cache with everything we touched
    issues = created_issues
    issues.extend(update_job.keys())
    vpre(VERBOSITY_INFO, u'Fetching %d issues...' % len(issues), end=u'')
    cache.fetch(issues)
    vpre(VERBOSITY_INFO, u' - done!')

    return 0


# TODO: get from template
EDIT_ISSUES_PROMPT = """\
# Please edit the issue%s listed below. You may also add additional
# issues and/or fields you would like to edit with their respective new
# values, or remove ones (or just keep them intact) you would not like to
# edit.
#
# Leave the file intact or delete all its content to cancel the
# operation.
"""


def cmd_issues_edit(cfg):
    """Edit the listed issues through the user's editor.

    Non-issue-key arguments are skipped with a warning.  Returns 0 on
    success/cancel, 1 if the user declines to apply pending changes.
    """
    cache = Cache(cfg)

    if cache.mode == Cache.MODE_OFFLINE:
        raise RuntimeError(
            u'Can\'t edit issues in offline mode yet')

    listed_issues = \
        Util.unwrap_list_of_lists(cfg.o.get('query.args', ()))

    if not cfg.o.get('cl.use_editor', False) \
            or not sys.stdin.isatty() \
            or not sys.stdout.isatty():
        raise RuntimeError(
            u'Issue editing is only supported using an editor for now')

    # validate issues listed
    to_edit = []
    for issue in listed_issues:
        if not Util.is_issue_key(issue):
            vpre(VERBOSITY_WARNINGS,
                 u'Not an issue key: \'%s\'. Skipped.' % issue)
            continue
        to_edit.append(issue)

    if not to_edit:
        vpre(VERBOSITY_WARNINGS, u'WARNING: Nothing to do.')
        return 0

    tpl = Template(cfg, cache)
    edc = EditingContext(cfg, cache, tpl)

    # fetch issues
    vpre(VERBOSITY_INFO,
         u'Fetching %d issues for editing...' % len(to_edit),
         end=u'')
    issues = cache.fetch(to_edit)
    vpre(VERBOSITY_INFO, u' - done!')

    # emit jicML for all the issues
    jicml = EDIT_ISSUES_PROMPT % (u's' if len(to_edit) != 1 else u'')
    jicml += edc.prepare_issues_for_editing(issues)

    # invoke editor
    editor = cfg.o.get('editor', 'sensible-editor')
    edited_jicml = Util.get_from_editor(
        cfg, editor, jicml, file_suffix=u'jicml')

    if not edited_jicml:
        vpre(VERBOSITY_INFO, u'Cancelled by the user')
        return 0

    # parse issues (populates the editing context)
    edc.parse_edited_issues(edited_jicml)

    # generate job
    update_job = edc.generate_update_job()

    # show job and get user's confirmation
    pre(u'Pending changes to be applied:')
    _print_job_fields(cache, update_job)

    if not Util.confirm(u'\nWould you like to apply them? (y/n)'):
        return 1

    # execute the job
    for issue_key, fields in update_job.iteritems():
        vpre(VERBOSITY_INFO, u'Updating %s...' % issue_key, end=u'')
        issue = cache.get(issue_key)[0]
        issue.update(**fields)
        vpre(VERBOSITY_INFO, u' - done!')

    # refresh the local cache with the updated issues
    issues = update_job.keys()
    vpre(VERBOSITY_INFO, u'Fetching %d issues...' % len(issues), end=u'')
    cache.fetch(issues)
    vpre(VERBOSITY_INFO, u' - done!')

    return 0
def cmd_issues_fetch(cfg):
    """Fetch the listed issues (and linked issues per query options)
    from the server into the local cache.  Not available offline.
    """
    # BUGFIX: the cache must be constructed before its mode is
    # inspected -- the original read `cache.mode` before assignment,
    # which raised NameError
    cache = Cache(cfg)

    if cache.mode == Cache.MODE_OFFLINE:
        raise RuntimeError(
            u'Can\'t fetch issues in offline mode')

    listed_issues = \
        Util.unwrap_list_of_lists(cfg.o.get('query.args', ()))

    tpl = Template(cfg, cache)

    cache.fetch(listed_issues,
                cfg.o.get('query.down', False),
                cfg.o.get('query.up', False),
                cfg.o.get('query.down_from'),
                cfg.o.get('query.up_from'),
                cfg.o.get('query.include_self', False),
                cfg.o.get('query.link_types'),
                cfg.o.get('query.depth'),
                cfg.o.get('display.order_by'),
                cfg.o.get('query.number_of_items'),
                cfg.o.get('query.filter'),
                cfg.o.get('query.jql'))

    return 0


def cmd_issues_fields(cfg):
    """Show field metadata, optionally filtered by partial names.

    Each positional argument is matched (case-insensitively) against
    field ids and names; with no arguments all fields are shown.
    """
    partial_names = \
        Util.unwrap_list_of_lists(cfg.o.get('query.args', ()))

    cache = Cache(cfg)
    tpl = Template(cfg, cache)

    to_match = [name.lower() for name in partial_names]

    matched = False
    for field in cache.fields.field_meta:
        if partial_names:
            # renamed from `id` to avoid shadowing the builtin
            field_id = field.get('id').lower()
            field_name = field.get('name').lower()
            for part_name in to_match:
                if field_id.find(part_name) != -1 \
                        or field_name.find(part_name) != -1:
                    matched = True
                    pr(tpl.format_issue_field_meta(tpl, field))
                    break
        else:
            matched = True
            pr(tpl.format_issue_field_meta(tpl, field))
    if not matched:
        pr(u'No fields matched: %s' % (u', '.join(partial_names)))


def cmd_issues_forget(cfg):
    """Drop the listed issues from the local cache."""
    listed_issues = \
        Util.unwrap_list_of_lists(cfg.o.get('query.args', ()))

    cache = Cache(cfg)

    cache.forget(listed_issues)


def cmd_issues_list(cfg):
    """List issues matching the query options via the template."""
    listed_issues = \
        Util.unwrap_list_of_lists(cfg.o.get('query.args', ()))

    cache = Cache(cfg)
    tpl = Template(cfg, cache)

    issues = cache.get(
        listed_issues,
        cfg.o.get('query.down', False),
        cfg.o.get('query.up', False),
        cfg.o.get('query.down_from'),
        cfg.o.get('query.up_from'),
        cfg.o.get('query.include_self', False),
        cfg.o.get('query.link_types'),
        cfg.o.get('query.depth'),
        cfg.o.get('display.order_by'),
        cfg.o.get('query.number_of_items'),
        cfg.o.get('query.filter'),
        cfg.o.get('query.jql'))

    tpl.print_issue_list(tpl, issues)

    return 0
def cmd_issues_open(cfg):
    """Open the listed issues (or the server root, with no arguments)
    in the configured web browser.

    Raises RuntimeError when the server configuration has no url.
    """
    cache = Cache(cfg)

    listed_issues = \
        Util.unwrap_list_of_lists(cfg.o.get('query.args', ()))

    browser = cfg.o.get('browser', 'sensible-browser')
    server_url = cache.srv_cfg.get('url')
    if not server_url:
        raise RuntimeError(
            u'Internal Error: missing server url')

    # BUGFIX: open the null device once and close our copy of the
    # handle -- the original opened a new, never-closed /dev/null per
    # spawned browser (fd leak); os.devnull is also portable
    devnull = open(os.devnull, 'w')
    try:
        if not listed_issues:
            subprocess.Popen((browser, server_url), stdout=devnull,
                             stderr=subprocess.STDOUT)
            return 0

        for issue in listed_issues:
            url = server_url + '/browse/%s' % issue
            subprocess.Popen((browser, url), stdout=devnull,
                             stderr=subprocess.STDOUT)
    finally:
        devnull.close()

    return 0
def cmd_issues_pull(cfg):
    """Pull updates for the locally cached issues from the server."""
    Cache(cfg).pull()
    return 0