[Python-apps-commits] r4310 - in packages/trac/branches/lenny-backports/debian/patches (3 files)
debacle at users.alioth.debian.org
debacle at users.alioth.debian.org
Thu Dec 17 14:18:43 UTC 2009
Date: Thursday, December 17, 2009 @ 14:18:42
Author: debacle
Revision: 4310
Add patches to backport.
Added:
packages/trac/branches/lenny-backports/debian/patches/40_timeline_author_filter.dpatch
(from rev 4309, packages/trac/trunk/debian/patches/40_timeline_author_filter.dpatch)
packages/trac/branches/lenny-backports/debian/patches/50_sqlitetopg_script.dpatch
(from rev 4309, packages/trac/trunk/debian/patches/50_sqlitetopg_script.dpatch)
packages/trac/branches/lenny-backports/debian/patches/60_roadmap_dbx_cursor_fix.dpatch
(from rev 4309, packages/trac/trunk/debian/patches/60_roadmap_dbx_cursor_fix.dpatch)
Copied: packages/trac/branches/lenny-backports/debian/patches/40_timeline_author_filter.dpatch (from rev 4309, packages/trac/trunk/debian/patches/40_timeline_author_filter.dpatch)
===================================================================
--- packages/trac/branches/lenny-backports/debian/patches/40_timeline_author_filter.dpatch (rev 0)
+++ packages/trac/branches/lenny-backports/debian/patches/40_timeline_author_filter.dpatch 2009-12-17 14:18:42 UTC (rev 4310)
@@ -0,0 +1,68 @@
+#! /bin/sh /usr/share/dpatch/dpatch-run
+## 40_timeline_author_filter.dpatch by <wmb at beron.tangosoft.com>
+##
+## All lines beginning with `## DP:' are a description of the patch.
+## DP: add author filter to timeline
+
+ at DPATCH@
+
+Index: trac/trac/timeline/web_ui.py
+===================================================================
+--- trac/trac/timeline/web_ui.py (revision 7937)
++++ trac/trac/timeline/web_ui.py (working copy)
+@@ -118,8 +118,12 @@
+ daysback = max(0, daysback)
+ if self.max_daysback >= 0:
+ daysback = min(self.max_daysback, daysback)
++ author = req.args.get('author',
++ req.session.get('timeline.author', ''))
++ author = author.strip()
+
+ data = {'fromdate': fromdate, 'daysback': daysback,
++ 'author': author,
+ 'today': format_date(today),
+ 'yesterday': format_date(today - timedelta(days=1)),
+ 'precisedate': precisedate, 'precision': precision,
+@@ -158,7 +162,9 @@
+ try:
+ for event in provider.get_timeline_events(req, start, stop,
+ filters):
+- events.append(self._event_data(provider, event))
++ author_index = len(event) < 6 and 2 or 4 # 0.10 events
++ if not author or event[author_index] == author:
++ events.append(self._event_data(provider, event))
+ except Exception, e: # cope with a failure of that provider
+ self._provider_failure(e, req, provider, filters,
+ [f[0] for f in available_filters])
+@@ -185,6 +191,7 @@
+ return 'timeline.rss', data, 'application/rss+xml'
+ else:
+ req.session['timeline.daysback'] = daysback
++ req.session['timeline.author'] = author
+ html_context = Context.from_request(req)
+ html_context.set_hints(wiki_flavor='oneliner',
+ shorten_lines=self.abbreviated_messages)
+@@ -192,7 +199,8 @@
+
+ add_stylesheet(req, 'common/css/timeline.css')
+ rss_href = req.href.timeline([(f, 'on') for f in filters],
+- daysback=90, max=50, format='rss')
++ daysback=90, max=50, author=author,
++ format='rss')
+ add_link(req, 'alternate', rss_href, _('RSS Feed'),
+ 'application/rss+xml', 'rss')
+
+Index: trac/trac/timeline/templates/timeline.html
+===================================================================
+--- trac/trac/timeline/templates/timeline.html (revision 7937)
++++ trac/trac/timeline/templates/timeline.html (working copy)
+@@ -18,7 +18,8 @@
+ <form id="prefs" method="get" action="">
+ <div>
+ <label>View changes from <input type="text" size="10" name="from" value="${format_date(fromdate)}" /></label> <br />
+- and <label><input type="text" size="3" name="daysback" value="$daysback" /> days back</label>.
++ and <label><input type="text" size="3" name="daysback" value="$daysback" /> days back</label><br />
++ <label>done by <input type="text" size="16" name="author" value="$author" /></label>
+ </div>
+ <fieldset>
+ <label py:for="filter in filters">
Copied: packages/trac/branches/lenny-backports/debian/patches/50_sqlitetopg_script.dpatch (from rev 4309, packages/trac/trunk/debian/patches/50_sqlitetopg_script.dpatch)
===================================================================
--- packages/trac/branches/lenny-backports/debian/patches/50_sqlitetopg_script.dpatch (rev 0)
+++ packages/trac/branches/lenny-backports/debian/patches/50_sqlitetopg_script.dpatch 2009-12-17 14:18:42 UTC (rev 4310)
@@ -0,0 +1,348 @@
+#! /bin/sh /usr/share/dpatch/dpatch-run
+## 50_sqlitetopostgresql_script.dpatch by <wmb at beron.tangosoft.com>
+##
+## All lines beginning with `## DP:' are a description of the patch.
+## DP: add script to migrate from SQLite to PostgreSQL
+
+ at DPATCH@
+
+diff -uraN trac-0.11.5.orig/contrib/sqlitetopgscript/sqlite2pg trac-0.11.5/contrib/sqlitetopgscript/sqlite2pg
+--- trac-0.11.5.orig/contrib/sqlitetopgscript/sqlite2pg 1970-01-01 01:00:00.000000000 +0100
++++ trac-0.11.5/contrib/sqlitetopgscript/sqlite2pg 2008-06-12 05:06:56.000000000 +0200
+@@ -0,0 +1,336 @@
++#!/usr/bin/env python
++
++# -*- coding: utf-8 -*-
++#
++# Copyright (C) 2008 John Hampton <pacopablo at pacopablo.com>
++# All rights reserved.
++#
++# This software is licensed as described in the file COPYING, which
++# you should have received as part of this distribution. The terms
++# are also available at http://trac.edgewall.com/license.html.
++#
++# This software consists of voluntary contributions made by many
++# individuals. For the exact contribution history, see the revision
++# history and logs, available at:
++# http://trac-hacks.org/wiki/SqliteToPgScript
++#
++# Basically, it boils down to: feel free to use/modify/distribute/etc.
++# However, give me credit where due. Also, if you like the script and
++# find it useful, buy me a soda or candy bar or something if ever we
++# meet. Thanks and enjoy.
++#
++# Author: John Hampton <pacopablo at pacopablo.com>
++
++
++import os, os.path, sys
++from trac.env import Environment, EnvironmentSetup, IEnvironmentSetupParticipant
++from trac.core import ComponentMeta
++from trac.db import DatabaseManager
++from optparse import OptionParser
++from psycopg2 import ProgrammingError, IntegrityError
++from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
++
++VERSION='0.10'
++
++class TableMigration(object):
++ """
++ Class to contain all table migration functions
++ """
++ def __init__(self, sqlenv, pgenv, opts):
++ """ Create a TableMigration instance. Required are SQLite and
++ PostgreSQL environment objects
++ """
++ self.sqlenv = sqlenv
++ self.pgenv = pgenv
++ self.opts = opts
++ self.sdb = self.sqlenv.get_db_cnx()
++ self.pgdb = self.pgenv.get_db_cnx()
++ self.pgdb_schema = self.pgdb.schema
++ pass
++
++ def lookupMethod(self, table):
++ """ Get a reference to the function that handles migration for the
++ specified table
++ """
++ m = getattr(self, ''.join(['migrate_', table.upper()]), None)
++ if not m:
++ m = self.default_copy
++ return m
++
++ def cleanTable(self, table):
++ """ Clear the contents of the table """
++ cur = self.pgdb.cursor()
++ delete_from = "DELETE FROM %s" % table
++ cur.execute(delete_from)
++
++ def migrateTable(self, table):
++ """ Migrate the table specified. """
++ if not self.opts.noclean:
++ self.cleanTable(table)
++ m = getattr(self, ''.join(['migrate_', table.upper()]), None)
++ if not m:
++ rc = self.default_copy(table)
++ else:
++ rc = m()
++ return rc
++
++ def default_copy(self, table):
++ """ Copy the table from the sqlite db to the postgresql db """
++ select_all = "SELECT * FROM %s" % table
++ scur = self.sdb.cursor()
++ pgcur = self.pgdb.cursor()
++ scur.execute(select_all)
++ cols = scur.description
++ if not cols:
++ return True
++ subs = ["%s" for x in range(len(cols))]
++ insert_into = ''.join([ "INSERT INTO ", table, " VALUES (",
++ ','.join(subs), ")"])
++ rows = row_exists = 0
++ for row in scur:
++ rows += 1
++ try:
++ pgcur.execute(insert_into, row)
++ except (ProgrammingError, IntegrityError):
++ row_exists += 1
++ continue
++ if row_exists:
++ print "%s of %s rows already existed in the %s table" % \
++ (str(row_exists), str(rows), table)
++ self.pgdb.commit()
++ return row_exists > 0
++
++ def migrate_TICKET(self):
++ """ Migrate the ticket table and adjust the sequences properly """
++ self.default_copy('ticket')
++ select_maxticket = "SELECT max(id) FROM ticket"
++ pgcur = self.pgdb.cursor()
++ pgcur.execute(select_maxticket)
++ r = pgcur.fetchone()
++ if r:
++ pgcur.execute("SELECT setval('ticket_id_seq', %s)", r)
++ self.pgdb.commit()
++
++ def migrate_REPORT(self):
++ """ Migrate the report table and adjust the sequences properly """
++ self.default_copy('report')
++ select_maxreport = "SELECT max(id) FROM report"
++ pgcur = self.pgdb.cursor()
++ pgcur.execute(select_maxreport)
++ r = pgcur.fetchone()
++ if r:
++ pgcur.execute("SELECT setval('report_id_seq', %s)", r)
++ self.pgdb.commit()
++
++ def migrate_PERMISSION(self):
++ """
++ Migrate permission table
++ """
++ scur = self.sdb.cursor()
++ pgcur = self.pgdb.cursor()
++ rows = row_exists = 0
++ if not self.opts.plist:
++ sql_select = "SELECT * FROM permission"
++ else:
++ subs = []
++ for x in range(len(self.opts.plist)):
++ subs.append("%s")
++ continue
++ sql_select = ''.join([ "SELECT * FROM permission ",
++ "WHERE username NOT IN (",
++ ','.join(subs),
++ ")",
++ ])
++
++ scur.execute(sql_select, self.opts.plist)
++ sql_insert = "INSERT INTO permission VALUES (%s, %s)"
++ for row in scur:
++ rows += 1
++ try:
++ pgcur.execute(sql_insert, row)
++ except (ProgrammingError, IntegrityError):
++ row_exists += 1
++ continue
++ if row_exists:
++ print "%s of %s rows already existed in the permission table" % (str(row_exists), str(rows))
++ self.pgdb.commit()
++ return row_exists > 0
++
++ def migrate_WIKI(self):
++ """
++ Migrate wiki table
++ """
++ scur = self.sdb.cursor()
++ pgcur = self.pgdb.cursor()
++ rows = row_exists = 0
++ if not self.opts.wlist:
++ sql_select = "SELECT * FROM wiki"
++ else:
++ subs = []
++ for x in range(len(self.opts.wlist)):
++ subs.append("%s")
++ continue
++ sql_select = ''.join([ "SELECT * FROM wiki ",
++ "WHERE name NOT IN (",
++ ','.join(subs),
++ ")",
++ ])
++
++ scur.execute(sql_select, self.opts.wlist)
++ sql_insert = "INSERT INTO wiki VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"
++ for row in scur:
++ rows += 1
++ try:
++ pgcur.execute(sql_insert, row)
++ except (ProgrammingError, IntegrityError):
++ row_exists += 1
++ continue
++ if row_exists:
++ print "%s of %s rows already existed in the wiki table" % (str(row_exists), str(rows))
++ self.pgdb.commit()
++ return row_exists > 0
++
++
++def getSQLiteEnvironment(opts):
++ """ Create an Environment connected to the SQLite database """
++
++ dburi = opts.sqlite_uri
++ env = Environment(opts.tracenv)
++ env.config.set('trac', 'database', dburi)
++ return env
++
++def getPostgreSQLEnvironment(opts):
++ """ Create an Environment connected to the PostgreSQL database """
++
++ dburi = opts.pg_uri
++ env = Environment(opts.tracenv)
++ env.config.set('trac', 'database', dburi)
++ try:
++ cnx = env.get_db_cnx()
++ cur = cnx.cursor()
++ cur.execute("select value from system where name = 'database_version'");
++ except ProgrammingError:
++ cnx.rollback()
++ DatabaseManager(env).init_db()
++ DatabaseManager(env).shutdown()
++ for x in filter(None, [env.compmgr[cls] for cls in
++ ComponentMeta._registry.get(
++ IEnvironmentSetupParticipant, [])]):
++ if isinstance(x, EnvironmentSetup):
++ x.environment_created()
++ if env.needs_upgrade():
++ env.upgrade()
++ return env
++
++def getAllTables(env):
++ """ Queries the PostgreSQL database for a list of tables """
++ cnx = env.get_db_cnx()
++ schema = cnx.schema or u'public'
++ cur = cnx.cursor()
++ select_tables = """SELECT tablename
++ FROM pg_catalog.pg_tables
++ WHERE schemaname = %s"""
++ cur.execute(select_tables, (schema,))
++ cnx.commit()
++ return [table[0] for table in cur]
++
++def Main(opts):
++ """
++ Migrate data from SQLite to PostgreSQL
++ """
++ rc = 0
++
++ sqlenv = getSQLiteEnvironment(opts)
++ pgenv = getPostgreSQLEnvironment(opts)
++ tmigration = TableMigration(sqlenv, pgenv, opts)
++ if not opts.tlist:
++ opts.tlist = getAllTables(pgenv)
++ for tname in opts.tlist:
++ try:
++ rc = tmigration.migrateTable(tname) or rc
++ except AttributeError:
++ print "Migration of %s has not been implemented" % tname
++ pass
++ continue
++
++ return rc
++
++def doArgs(argv):
++ """ Look, if you can't guess what this function does, just give up now. """
++ global VERSION
++
++ version = "%%prog %s" % VERSION
++ usage ="usage: %prog [options] [site]"
++ description="%prog is used to migrate data from SQLite to PostgreSQL."
++
++ parser = OptionParser(usage=usage, version=version, description=description)
++
++ parser.add_option("-t", "--tracbase", dest="tracbase", type="string",
++ help="Parent path for trac sites",
++ metavar="<path>")
++ parser.add_option("-e", "--tracenv", dest="tracenv", type="string",
++ help="Path to trac environment",
++ metavar="<path>")
++ parser.add_option("-m", "--migrate", dest="migrate", type="string",
++ help="Comma separated list of tables to migrate",
++ metavar="<list>", default=None)
++ parser.add_option("", "--exclude_perms", dest="perms_exclude",
++ type="string", help="Comma separated list of users to "
++ "exclude from permission migration", metavar="<list>")
++ parser.add_option("-w", "--wikipages", dest="wikipages", type="string",
++ help="Comma separated list of wiki page names to "
++ "ommit from the migration", metavar="<list>")
++ parser.add_option("-p", "--pg_uri", dest="pg_uri", type="string",
++ help="DB URI for PostgreSQL database",
++ metavar="<uri>")
++ parser.add_option("-s", "--sqlite_uri", dest="sqlite_uri", type="string",
++ help="DB URI for SQLite database",
++ metavar="<uri>", default="sqlite:db/trac.db")
++ parser.add_option("", "--noclean", dest="noclean", action="store_true",
++ help="Do not clear PostgreSQL tables before transfer",
++ default=False)
++
++ (options, args) = parser.parse_args(argv)
++ if not options.tracenv:
++ if not options.tracbase:
++ print ("You must specify the --tracenv or the --tracbase option")
++ sys.exit(1)
++ else:
++ if len(args) < 1:
++ print ("You must specify a project name\n")
++ sys.exit(1)
++ options.project = args[0]
++ options.tracenv = os.path.join(options.tracbase, args[0])
++
++ if not options.pg_uri or not options.pg_uri.startswith('postgres://'):
++ print ("You must specify a valid URI for the PostgreSQL database.")
++ print (" eg. postgres://user:password@localhost/dbname")
++ sys.exit(1)
++
++ if not options.sqlite_uri or not options.sqlite_uri.startswith('sqlite:'):
++ print ("You must specify a valid URI for the SQLite database.")
++ print (" eg. sqlite:db/trac.db")
++ sys.exit(1)
++
++
++ options.args = args
++ options.tlist = options.migrate and \
++ [t.strip() for t in options.migrate.strip().split(',')]
++ options.wlist = options.wikipages and \
++ [w.strip() for w in options.wikipages.strip().split(',')] \
++ or []
++ options.plist = options.perms_exclude and \
++ [p.strip() for p in options.perms_exclude.strip().split(',')] \
++ or []
++
++ return options
++
++
++def main(argv):
++ opts = doArgs(argv)
++ Main(opts)
++ return 0
++
++if __name__ == '__main__':
++ sys.exit(main(sys.argv[1:]))
++
Copied: packages/trac/branches/lenny-backports/debian/patches/60_roadmap_dbx_cursor_fix.dpatch (from rev 4309, packages/trac/trunk/debian/patches/60_roadmap_dbx_cursor_fix.dpatch)
===================================================================
--- packages/trac/branches/lenny-backports/debian/patches/60_roadmap_dbx_cursor_fix.dpatch (rev 0)
+++ packages/trac/branches/lenny-backports/debian/patches/60_roadmap_dbx_cursor_fix.dpatch 2009-12-17 14:18:42 UTC (rev 4310)
@@ -0,0 +1,22 @@
+#! /bin/sh /usr/share/dpatch/dpatch-run
+## 60_roadmap_dbx_cursor_fix.dpatch by <wmb at wmbmobil.Ammonoidea.Ammonit.de>
+##
+## All lines beginning with `## DP:' are a description of the patch.
+## DP: fix a potential sqlite cursor problem
+
+ at DPATCH@
+
+diff --git a/trac/ticket/roadmap.py b/trac/ticket/roadmap.py
+index d2684f1..57eb6a5 100644
+--- a/trac/ticket/roadmap.py
++++ b/trac/ticket/roadmap.py
+@@ -195,7 +195,8 @@ class DefaultTicketGroupStatsProvider(Component):
+ for s in all_statuses:
+ status_cnt[s] = 0
+ if total_cnt:
+- cursor = self.env.get_db_cnx().cursor()
++ db = self.env.get_db_cnx()
++ cursor = db.cursor()
+ str_ids = [str(x) for x in sorted(ticket_ids)]
+ cursor.execute("SELECT status, count(status) FROM ticket "
+ "WHERE id IN (%s) GROUP BY status" %
More information about the Python-apps-commits
mailing list