| Field | Value | Date |
|---|---|---|
| author | Matthew Somerville <matthew-github@dracos.co.uk> | 2015-01-19 16:20:55 +0000 |
| committer | Matthew Somerville <matthew-github@dracos.co.uk> | 2015-01-19 16:35:36 +0000 |
| commit | 2e8df1a5a6610c43e0c1bda15d018fa16738061b (patch) | |
| tree | e06b0a33d399caf5ba0fb94186f609617496740e | |
| parent | 7fa239a9c2122074bb65bbb0ac7d30d922a4f761 (diff) | |
Tidy up of bin directory.
Remove some unneeded scripts, move others to cobrand-specific directories.
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | .travis.yml | 2 |
| -rwxr-xr-x | bin/comment-backfill | 21 |
| -rwxr-xr-x | bin/emptyhomes/canonicalise-eha (renamed from bin/canonicalise-eha) | 0 |
| -rwxr-xr-x | bin/emptyhomes/make_welsh_po (renamed from bin/make_emptyhomes_welsh_po) | 8 |
| -rw-r--r-- | bin/fiksgatami/export-norwegian-contacts (renamed from bin/export-norwegian-contacts) | 0 |
| -rwxr-xr-x | bin/fiksgatami/load-norwegian-contacts (renamed from bin/load-norwegian-contacts) | 0 |
| -rwxr-xr-x | bin/fixmystreet.com/canonicalise-csv (renamed from bin/canonicalise-csv) | 0 |
| -rwxr-xr-x | bin/fixmystreet.com/generate_council_location (renamed from bin/generate_council_location) | 0 |
| -rwxr-xr-x | bin/fixmystreet.com/import-categories (renamed from bin/import-categories) | 0 |
| -rwxr-xr-x | bin/fixmystreet.com/load-contacts (renamed from bin/load-contacts) | 0 |
| -rwxr-xr-x | bin/fixmystreet.com/populate_bing_cache (renamed from bin/populate_bing_cache) | 0 |
| -rwxr-xr-x | bin/fixmystreet.com/rotate-photos (renamed from bin/rotate-photos) | 0 |
| -rwxr-xr-x | bin/fixmystreet.com/showcouncilrates (renamed from bin/showcouncilrates) | 0 |
| -rwxr-xr-x | bin/fixmystreet.com/update-areas (renamed from bin/update-areas) | 0 |
| -rwxr-xr-x | bin/fixmystreet.com/update-send-questionnaire (renamed from bin/update-send-questionnaire) | 0 |
| -rwxr-xr-x | bin/install-as-user | 2 |
| -rwxr-xr-x | bin/kasabi | 234 |
| -rwxr-xr-x | bin/zerotb/import_clinic_list.pl (renamed from bin/zerotb_import_clinic_list.pl) | 0 |
| -rwxr-xr-x | bin/zurich/geocode | 45 |
| -rwxr-xr-x | bin/zurich/overdue-alert (renamed from bin/zurich-overdue-alert) | 2 |
| -rw-r--r-- | data/kasabi-requirements.txt | 2 |
| -rw-r--r-- | perllib/FixMyStreet/DB/Result/Token.pm | 5 |
| -rw-r--r-- | perllib/Utils.pm | 1 |
23 files changed, 7 insertions, 315 deletions
diff --git a/.travis.yml b/.travis.yml
index bad676933..1e4b7611f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -33,7 +33,7 @@ before_script:
     sed -r -e "s,(FMS_DB_USER:) 'fms',\\1 'postgres'," conf/general.yml-example > conf/general.yml
   - ./bin/cron-wrapper ./bin/make_po FixMyStreet-EmptyHomes
-  - ./bin/cron-wrapper ./bin/make_emptyhomes_welsh_po
+  - ./bin/cron-wrapper ./bin/emptyhomes/make_welsh_po
   - commonlib/bin/gettext-makemo FixMyStreet
 script: "bin/cron-wrapper perl /usr/bin/prove -rl t"
 after_script:
diff --git a/bin/comment-backfill b/bin/comment-backfill
deleted file mode 100755
index e296d7756..000000000
--- a/bin/comment-backfill
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env perl
-
-use strict;
-use warnings;
-require 5.8.0;
-use DateTime;
-
-use FixMyStreet::App;
-
-use Open311;
-use Open311::GetServiceRequestUpdates;
-
-my $start_time = DateTime->now->subtract( days => 1, hours => 1 );
-my $end_time = DateTime->now;
-
-my $updates = Open311::GetServiceRequestUpdates->new(
-    start_date => $start_time,
-    end_date => $end_time,
-);
-
-$updates->fetch;
diff --git a/bin/canonicalise-eha b/bin/emptyhomes/canonicalise-eha
index 1030982fa..1030982fa 100755
--- a/bin/canonicalise-eha
+++ b/bin/emptyhomes/canonicalise-eha
diff --git a/bin/make_emptyhomes_welsh_po b/bin/emptyhomes/make_welsh_po
index f4f6850d6..a85e7c35b 100755
--- a/bin/make_emptyhomes_welsh_po
+++ b/bin/emptyhomes/make_welsh_po
@@ -6,10 +6,10 @@
 use strict;
 use POSIX;
 use FindBin;
-use lib "$FindBin::Bin/../perllib";
+use lib "$FindBin::Bin/../../perllib";
 use PoChange;
 
-chdir("$FindBin::Bin/../locale");
+chdir("$FindBin::Bin/../../locale");
 
 # First read in translation and match up.
 open(INPO, 'cy_GB.UTF-8/LC_MESSAGES/EmptyHomes.po') or die $!;
@@ -51,7 +51,7 @@ mkdir("cy_GB.UTF-8/LC_MESSAGES");
 open(MAINPO, 'FixMyStreet.po') or die;
 open(OUTPO, ">cy_GB.UTF-8/LC_MESSAGES/FixMyStreet-EmptyHomes.po") or die;
 
-print OUTPO "# AUTOMATICALLY GENERATED by make_emptyhomes_welsh_po, do not edit\n\n";
+print OUTPO "# AUTOMATICALLY GENERATED by make_welsh_po, do not edit\n\n";
 
 my $buffer = "";
 my $start = 0;
@@ -60,7 +60,7 @@ while(<MAINPO>) {
         s/#, fuzzy/#/;
     }
     if (m/"Last-Translator: FULL NAME/) {
-        $_ = '"Last-Translator: mysociety/bin/make_emptyhomes_po\\n"'."\n";
+        $_ = '"Last-Translator: mysociety/bin/emptyhomes/make_welsh_po\\n"'."\n";
     }
     if (m/"PO-Revision-Date: YEAR-MO-DA/) {
         my $time = POSIX::strftime("%Y-%m-%d %H:%M%z", localtime(time()));
diff --git a/bin/export-norwegian-contacts b/bin/fiksgatami/export-norwegian-contacts
index 0209c6512..0209c6512 100644
--- a/bin/export-norwegian-contacts
+++ b/bin/fiksgatami/export-norwegian-contacts
diff --git a/bin/load-norwegian-contacts b/bin/fiksgatami/load-norwegian-contacts
index b73778848..b73778848 100755
--- a/bin/load-norwegian-contacts
+++ b/bin/fiksgatami/load-norwegian-contacts
diff --git a/bin/canonicalise-csv b/bin/fixmystreet.com/canonicalise-csv
index c0a7fc60b..c0a7fc60b 100755
--- a/bin/canonicalise-csv
+++ b/bin/fixmystreet.com/canonicalise-csv
diff --git a/bin/generate_council_location b/bin/fixmystreet.com/generate_council_location
index c7aea8074..c7aea8074 100755
--- a/bin/generate_council_location
+++ b/bin/fixmystreet.com/generate_council_location
diff --git a/bin/import-categories b/bin/fixmystreet.com/import-categories
index e9008b93f..e9008b93f 100755
--- a/bin/import-categories
+++ b/bin/fixmystreet.com/import-categories
diff --git a/bin/load-contacts b/bin/fixmystreet.com/load-contacts
index b18699db1..b18699db1 100755
--- a/bin/load-contacts
+++ b/bin/fixmystreet.com/load-contacts
diff --git a/bin/populate_bing_cache b/bin/fixmystreet.com/populate_bing_cache
index 17c8911d0..17c8911d0 100755
--- a/bin/populate_bing_cache
+++ b/bin/fixmystreet.com/populate_bing_cache
diff --git a/bin/rotate-photos b/bin/fixmystreet.com/rotate-photos
index 7b8109d65..7b8109d65 100755
--- a/bin/rotate-photos
+++ b/bin/fixmystreet.com/rotate-photos
diff --git a/bin/showcouncilrates b/bin/fixmystreet.com/showcouncilrates
index 1dacae597..1dacae597 100755
--- a/bin/showcouncilrates
+++ b/bin/fixmystreet.com/showcouncilrates
diff --git a/bin/update-areas b/bin/fixmystreet.com/update-areas
index a8cc01769..a8cc01769 100755
--- a/bin/update-areas
+++ b/bin/fixmystreet.com/update-areas
diff --git a/bin/update-send-questionnaire b/bin/fixmystreet.com/update-send-questionnaire
index 7a231b919..7a231b919 100755
--- a/bin/update-send-questionnaire
+++ b/bin/fixmystreet.com/update-send-questionnaire
diff --git a/bin/install-as-user b/bin/install-as-user
index ef97a41b3..33a74c140 100755
--- a/bin/install-as-user
+++ b/bin/install-as-user
@@ -134,7 +134,7 @@ echo $DONE_MSG
 # Generate po and mo files (these invocations taken from Kagee's script):
 echo "Creating locale .mo files"
 bin/cron-wrapper bin/make_po FixMyStreet-EmptyHomes
-bin/cron-wrapper bin/make_emptyhomes_welsh_po
+bin/cron-wrapper bin/emptyhomes/make_welsh_po
 commonlib/bin/gettext-makemo FixMyStreet
 echo $DONE_MSG
diff --git a/bin/kasabi b/bin/kasabi
deleted file mode 100755
index 456b2f4d1..000000000
--- a/bin/kasabi
+++ /dev/null
@@ -1,234 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import datetime
-import json
-import os.path
-import re
-import urllib
-import yaml
-
-import pytassium
-import psycopg2
-import psycopg2.extras
-from rdfchangesets import BatchChangeSet
-from rdflib.namespace import XSD
-
-# Set up data access
-config = yaml.load(open(os.path.abspath(os.path.join(os.path.dirname(__file__), '../conf/general.yml'))))
-dataset = pytassium.Dataset('fixmystreet', config['KASABI_API_KEY'])
-db = psycopg2.connect( "host='{host}' dbname='{name}' user='{user}' password='{password}'".format(
-    host=config['FMS_DB_HOST'],
-    name=config['FMS_DB_NAME'],
-    user=config['FMS_DB_USER'],
-    password=config['FMS_DB_PASS']
-))
-cursor = db.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
-report_cursor = db.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
-
-def main():
-
-    # Check the status of our dataset
-    response, status = dataset.status()
-    if response.status not in range(200, 300) or status['storageMode'] == 'read-only':
-        # We can't import anything, so let's not bother
-        sys.exit()
-
-    # Fetch reports that have changed since last update in dataset
-    response, data = dataset.select('select (max(?lastupdate) as ?max) where { ?report <http://data.kasabi.com/dataset/fixmystreet/def/lastUpdate> ?lastupdate }')
-    max_lastUpdate = data[1][0]['max']
-    query = """
-        SELECT id, latitude, longitude, used_map, council,
-            category, title, detail, (photo IS NOT NULL) as photo,
-            confirmed, lastupdate, whensent, state
-        FROM problem
-        WHERE state not in ('unconfirmed', 'partial')
-    """
-    if len(sys.argv) > 1 and sys.argv[1].isdigit():
-        cursor.execute("%s AND id=%%s" % query, (sys.argv[1],))
-    else:
-        cursor.execute("%s AND lastupdate > %%s ORDER BY lastupdate" % query, (str(max_lastUpdate),))
-
-    for report in cursor:
-        changeset = FixMyStreetChangeSet(dataset)
-        if report['state'] == 'hidden':
-            # If the report has been hidden, just remove it
-            changeset.remove_report(report)
-        else:
-
-            # Canonicalise some values
-            report['latitude'] = round(report['latitude'], 6) # <10cm
-            report['longitude'] = round(report['longitude'], 6)
-            report['title'] = tidy_string(report['title'])
-            report['detail'] = tidy_string(report['detail'])
-            report['confirmed'] = report['confirmed'].replace(microsecond=0).isoformat() # Don't want microseconds
-            report['lastupdate'] = report['lastupdate'].replace(microsecond=0).isoformat()
-            report['council'] = sorted(re.sub('\|.*', '', report['council'] or '').split(',')) # Remove missing councils
-
-            # Fetch updates to note state changes
-            states = [ { 'state': 'confirmed', 'time': report['confirmed'] } ]
-            report_cursor.execute("""
-                SELECT id, mark_fixed, mark_open, problem_state, confirmed
-                FROM comment
-                WHERE problem_id=%s AND state='confirmed'
-                ORDER BY created
-            """, (report['id'], ))
-            for update in report_cursor:
-                t = update['confirmed'].replace(microsecond=0).isoformat()
-                if update['problem_state']:
-                    states.append( { 'state': update['problem_state'], 'time': t } )
-                elif update['mark_fixed']:
-                    states.append( { 'state': 'fixed - user', 'time': t } )
-                elif update['mark_open']:
-                    states.append( { 'state': 'confirmed', 'time': t } )
-
-            # Remove and then re-add the report
-            changeset.remove_report(report)
-            changeset.add_report(report, states)
-        changeset.apply()
-
-# Escape double quotes and backslashes, remove carriage returns
-def tidy_string(s):
-    return s.replace('\r', '').replace('\\', '\\\\').replace('"', r'\"')
-
-class FixMyStreetChangeSet(object):
-    """Something that hosts either or both of a BatchChangeSet and a Turtle
-    string for sending to Kasabi. Changes are done by removing all triples
-    and then readding the report."""
-    _changeset = None
-    data = ''
-
-    def __init__(self, dataset):
-        self.dataset = dataset
-
-    def __str__(self):
-        return unicode(self).encode('utf-8')
-
-    def __unicode__(self):
-        g = self.changeset.getGraph()
-        data = g.serialize(format='xml')
-        return "Changeset:\n" + data + "\nNew data:\n" + self.data
-
-    @property
-    def changeset(self):
-        if not self._changeset:
-            self._changeset = BatchChangeSet()
-            self._changeset.setChangeReason("Report updates")
-            self._changeset.setCreatorName("FixMyStreet")
-        return self._changeset
-
-    def apply(self):
-        if len(self.changeset.changesets):
-            #response, data = self.dataset.apply_changeset(self.changeset)
-            # XXX Do everything the above call does, but additionally escape carriage returns to prevent 409 error
-            api = self.dataset.get_api('update')
-            g = self.changeset.getGraph()
-            data = g.serialize(format='xml')
-            data = data.replace('\r', ' ')
-            response, data = api.client.request(api.uri, "POST", body=data, headers={"accept" : "*/*", 'content-type':'application/vnd.talis.changeset+xml', 'X_KASABI_APIKEY':api.apikey})
-            if response.status not in range(200, 300):
-                print 'Error:', response.status, response.reason, data
-        if self.data:
-            response, data = self.dataset.store_data(self.data, media_type='text/turtle')
-            if response.status not in range(200, 300):
-                print 'Error:', response.status, response.reason, data
-
-    def remove_report(self, report):
-        uri = 'http://data.kasabi.com/dataset/fixmystreet/report/{id}'.format(**report)
-        response, data = self.dataset.select('select ?p ?o where {{ <{0}> ?p ?o }}'.format(uri))
-        for row in data[1]:
-            # Need to set the datatype correctly for the lastUpdate
-            if str(row['p']) == 'http://data.kasabi.com/dataset/fixmystreet/def/lastUpdate':
-                row['o'].datatype = XSD.dateTime
-            # Delete the referenced statuses
-            if re.match('http://data.kasabi.com/dataset/fixmystreet/report/\d+/status/\d+$', unicode(row['o'])):
-                uri2 = unicode(row['o'])
-                response2, data2 = self.dataset.select('select ?p ?o where {{ <{0}> ?p ?o }}'.format(uri2))
-                for row2 in data2[1]:
-                    self.changeset.remove(uri2, row2['p'], row2['o'])
-            self.changeset.remove(uri, row['p'], row['o'])
-
-    def add_report(self, report, states):
-        # Work out the update states
-        c = 0
-        state_data = { 'refs': '', 'objs': '' }
-        for state in states:
-            state_data['refs'] += ' ; fixmystreet:status <http://data.kasabi.com/dataset/fixmystreet/report/{id}/status/{c}>\n'.format(id=report['id'], c=c)
-            obj = re.sub('[ -]', '', ' '.join(x.capitalize() for x in state['state'].split()))
-            if obj == 'Confirmed': obj = 'Open'
-            state_data['objs'] += """<http://data.kasabi.com/dataset/fixmystreet/report/{id}/status/{c}> a fixmystreet:{state}Status
-    ; event:time <http://reference.data.gov.uk/id/gregorian-instant/{time}>
-    .
-""".format( id=report['id'], c=c, state=obj, time=state['time'] )
-            # ; rdfs:label
-            c += 1
-
-        # Get info for the councils
-        council_data = { 'sentTo': '', 'areaNames': [] }
-        for council in report['council']:
-            if not council: continue
-            js = json.load(urllib.urlopen('http://mapit.mysociety.org/area/{0}'.format(council)))
-            os_id = int(js['codes']['unit_id']) + 7000000000000000
-            if report['whensent']:
-                council_data['sentTo'] += ' ; fixmystreet:sentTo <http://data.ordnancesurvey.co.uk/id/{os_id}>\n'.format(os_id=os_id)
-            council_data['areaNames'].append(js['name'])
-            council_data.setdefault('firstCouncil', council)
-        council_data['areaNames'] = ' / '.join(council_data['areaNames'])
-        council_data.setdefault('firstCouncil', '0')
-
-# easting/northing
-
-        self.data += '''
-@prefix fixmystreet: <http://data.kasabi.com/dataset/fixmystreet/def/> .
-@prefix dct: <http://purl.org/dc/terms/> .
-@prefix event: <http://purl.org/NET/c4dm/event.owl#> .
-@prefix geo: <http://www.w3.org/2003/01/geo/wgs84_pos#> .
-@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
-@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
-@prefix foaf: <http://xmlns.com/foaf/0.1/> .
-@prefix georss: <http://www.georss.org/georss/> .
-@prefix owl: <http://www.w3.org/2002/07/owl#> .
-
-<http://data.kasabi.com/dataset/fixmystreet/report/{id}> a fixmystreet:Report
-    ; fixmystreet:location <http://data.kasabi.com/dataset/fixmystreet/location/geo/point/{latitude}/{longitude}>
-    ; dct:description """{detail}"""
-    ; dct:title "{title}"
-{photo_url}
-{state_data[refs]}
-{council_data[sentTo]}
-    ; fixmystreet:category <http://data.kasabi.com/dataset/fixmystreet/category/{council_data[firstCouncil]}/{category_uri}>
-    ; fixmystreet:lastUpdate "{lastupdate}"^^xsd:dateTime
-    ; foaf:page <http://www.fixmystreet.com/report/{id}>
-    .
-
-<http://data.kasabi.com/dataset/fixmystreet/location/geo/point/{latitude}/{longitude}> a fixmystreet:Location
-    ; geo:lat "{latitude}"
-    ; geo:long "{longitude}"
-    ; georss:point "{latitude} {longitude}"
-    .
-
-<http://data.kasabi.com/dataset/fixmystreet/location/geo/point/{latitude}/{longitude}>
-    owl:sameAs <http://rdfize.com/geo/point/{latitude}/{longitude}>
-    .
-{state_data[objs]}
-<http://data.kasabi.com/dataset/fixmystreet/category/{council_data[firstCouncil]}/{category_uri}> a skos:Concept
-    ; skos:prefLabel "{category}"
-    ; skos:altLabel "{category} in {council_data[areaNames]}"
-    .
-        '''.format(
-            photo_url = ' ; foaf:depiction <http://www.fixmystreet.com/photo/{id}.jpeg>'.format(**report) if report['photo'] else '',
-            state_data = state_data,
-            council_data = council_data,
-            category_uri = report['category'].lower().replace(' ', '-'),
-            **report
-        )
-
-# ; skos:broader <http://data.kasabi.com/dataset/fixmystreet/category/street-lights>
-
-# this category is the broadest highlevel street light category
-#<http://data.kasabi.com/dataset/fixmystreet/category/street-lights> a skos:Concept
-# ; skos:prefLabel "Street lights"
-# .
-
-main()
-
diff --git a/bin/zerotb_import_clinic_list.pl b/bin/zerotb/import_clinic_list.pl
index 359a63925..359a63925 100755
--- a/bin/zerotb_import_clinic_list.pl
+++ b/bin/zerotb/import_clinic_list.pl
diff --git a/bin/zurich/geocode b/bin/zurich/geocode
deleted file mode 100755
index 9482b27e6..000000000
--- a/bin/zurich/geocode
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/perl
-
-=head1 NAME
-
-zurich/geocode - commandline tool to test the Zurich geocoder
-
-=head1 SYNOPSIS
-
-    # Firstly:
-    ## copy the GEOCODER config from a current Zurich conf to your conf/general.yml
-    $ eval `perl setenv.pl`
-
-    $ bin/zurich/geocode Magnus
-
-    # ... output from geocoder
-
-This can be used to test the results of, e.g.
-
-    https://www.zueriwieneu.ch/ajax/geocode?term=Magnus
-
-but without the caching which FixMyStreet applies, and passing on any 500
-errors from the server.
-
-=cut
-
-use strict;
-use warnings;
-require 5.8.0;
-
-
-use Data::Dumper;
-use feature 'say';
-
-use FixMyStreet;
-use FixMyStreet::App;
-use FixMyStreet::Geocode::Zurich;
-
-# TODO use FixMyStreet::override_config to get data from conf/general.yml.zurich if available
-my $geocoder = FixMyStreet->config('GEOCODER')
-    or die "No GEOCODER config -- please copy appropriate Zurich conf to conf/general.yml";
-
-my $c = FixMyStreet::App->new();
-my $s = join ' ', @ARGV;
-
-say Dumper( FixMyStreet::Geocode::Zurich::string( $s, $c ) );
diff --git a/bin/zurich-overdue-alert b/bin/zurich/overdue-alert
index c09aef1e2..fd9c26cb9 100755
--- a/bin/zurich-overdue-alert
+++ b/bin/zurich/overdue-alert
@@ -1,6 +1,6 @@
 #!/usr/bin/env perl
 
-# zurich-overdue-alert:
+# zurich/overdue-alert:
 # Send email alerts to administrators for overdue admin activities.
 #
 # Copyright (c) 2012 UK Citizens Online Democracy. All rights reserved.
diff --git a/data/kasabi-requirements.txt b/data/kasabi-requirements.txt
deleted file mode 100644
index 9b4397f00..000000000
--- a/data/kasabi-requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-psycopg2
-pytassium
diff --git a/perllib/FixMyStreet/DB/Result/Token.pm b/perllib/FixMyStreet/DB/Result/Token.pm
index 5525fe7a5..0156af137 100644
--- a/perllib/FixMyStreet/DB/Result/Token.pm
+++ b/perllib/FixMyStreet/DB/Result/Token.pm
@@ -30,9 +30,6 @@ __PACKAGE__->set_primary_key("scope", "token");
 # Created by DBIx::Class::Schema::Loader v0.07017 @ 2012-03-08 17:19:55
 # DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:+LLZ8P5GXqPetuGyrra2vw
 
-# Trying not to use this
-# use mySociety::DBHandle qw(dbh);
-
 use mySociety::AuthToken;
 
 =head1 NAME
@@ -43,8 +40,6 @@ FixMyStreet::DB::Result::Token
 
 Representation of mySociety::AuthToken in the DBIx::Class world.
 
-Mostly done so that we don't need to use mySociety::DBHandle.
-
 The 'data' value is automatically inflated and deflated in the same way
 that the AuthToken would do it. 'token' is set to a new random value by
 default and the 'created' timestamp is achieved using the database function
diff --git a/perllib/Utils.pm b/perllib/Utils.pm
index 8f0ac1820..6d6d5bcde 100644
--- a/perllib/Utils.pm
+++ b/perllib/Utils.pm
@@ -15,7 +15,6 @@
 use strict;
 use DateTime;
 use Encode;
 use File::Slurp qw();
-use mySociety::DBHandle qw(dbh);
 use mySociety::GeoUtil;
 use mySociety::Locale;