David Bailey authored and committed on May 6, 2015
1 parent 57c92a0 · commit 2b10547
Showing 11 changed files with 487 additions and 0 deletions.
@@ -0,0 +1,30 @@
from mpl_toolkits.basemap import Basemap
from matplotlib import pyplot
import numpy as np
import CoreLocation

# Ask OS X for the current location via the CoreLocation (pyobjc) bindings.
manager = CoreLocation.CLLocationManager.alloc().init()
manager.delegate()
manager.startUpdatingLocation()
loc = manager.location()
if loc is None:
    lat, lon = 0, 0
else:
    coord = loc.coordinate()
    lat, lon = coord.latitude, coord.longitude

# def convert_to_decimal(degrees, arcminutes, arcseconds):
#     return float(degrees + arcminutes/60. + arcseconds/3600.)

m = Basemap(projection='merc', llcrnrlat=-80, urcrnrlat=80, llcrnrlon=-180, urcrnrlon=180, lat_ts=20, resolution='c')

# Draw the base layers, then plot the current position as a single marker.
m.drawcoastlines()
m.drawstates()
m.bluemarble()
m.drawparallels(np.arange(-90., 91., 30.))
m.drawmeridians(np.arange(-180., 181., 60.))
m.drawmapboundary(fill_color='aqua')
m.scatter(lon, lat, 100, latlon=True, color='m', marker='.')

pyplot.title("Mercator Projection | Latitude: " + str(lat) + ", Longitude: " + str(lon))
pyplot.show()
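Note that startUpdatingLocation() is asynchronous, so location() can legitimately return None on the very first read even when location services are enabled. A minimal workaround sketch (assuming the same pyobjc CoreLocation bindings as above) is to poll briefly before falling back to (0, 0):

import time

loc = None
for _ in range(10):          # retry for up to ~5 seconds
    loc = manager.location()
    if loc is not None:
        break
    time.sleep(0.5)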
@@ -0,0 +1,92 @@
import math
import csv
import numpy as np
import scipy.fftpack
import matplotlib as mpl
import matplotlib.pyplot as plt
import datetime
import pandas as pd
from scipy import stats

def pv(fv, r):
    return fv/(1+r)

def pvSum(c, r):  # PV as a sum of cash flows c w/ interest rate r
    total = 0
    for n, ck in enumerate(c):
        print n, ck, r
        total += ck/((1+r)**n)
    return total

def annuity(A, r):  # Annuity that pays A forever
    return A/r

def annuityEnd(A, r, n):  # Annuity that pays A until period n
    return annuity(A, r)*(1 - 1/((1+r)**n))

def discountRate(spotRate, t):
    return 1/((1+spotRate)**t)

def fv(pv, r):
    return pv*(1+r)

def p(f, i, n):
    return f/((1+i)**n)
vec_p = np.vectorize(p)
ptest = np.array([np.array([1., 2., 3.]), np.array(10), np.array(10)])
vec_p(ptest[0], ptest[1], ptest[2])

def f(p, i, n):
    return p*((1+i)**n)

def fe(p, i, n):  # continuous compounding
    return p*(math.e**(i*n))

def pe(f, i, n):
    return f/(math.e**(i*n))

# Read the CPI series (date, value) from the CSV export.
cpiv = []
cpid = []
with open('CPIAUCSL.csv', 'rb') as cpifile:
    cpi = csv.reader(cpifile)
    for row in cpi:
        cpiv.append(row[1])
        cpid.append(row[0])

# Drop the header row.
cpiv.pop(0)
cpid.pop(0)

cpidd = []
for item in cpid:
    cpidd.append(mpl.dates.date2num(datetime.datetime.strptime(item, "%Y-%m-%d")))

plt.plot_date(x=cpidd, y=cpiv)
plt.show()

# Month-over-month changes, annualized by the mean below.
c = []
b = 0
for a in cpiv:
    if b: c.append((float(a)-b)/float(a))
    b = float(a)

np.mean(c)*12

x = np.poly1d([1, 0])

cpif = scipy.fftpack.fft(np.asarray(cpiv, dtype=float))  # FFT of the CPI series

def ERi(Rf, Bi, ERm):  # CAPM
    return Rf + Bi*(ERm - Rf)

df = pd.read_csv('CPIAUCSL.csv')

x = [5.05, 6.75, 3.21, 2.66]
y = [1.65, 26.5, -5.93, 7.96]
gradient, intercept, r_value, p_value, std_err = stats.linregress(x, y)
t = np.arange(0.0, 10.0, 0.1)
z = gradient*t + intercept
plt.plot(t, z)
plt.scatter(x, y)
plt.show()
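As a quick sanity check of the time-value helpers above (an illustrative example with an assumed 5% rate, not part of the original script), the discounted cash-flow sum, perpetuity, and truncated annuity agree with the closed-form values:

r = 0.05
print pvSum([0, 100, 100], r)   # 100/1.05 + 100/1.05**2 ~= 185.94
print annuity(100, r)           # 100/0.05 = 2000.0
print annuityEnd(100, r, 10)    # 2000*(1 - 1/1.05**10) ~= 772.17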
@@ -0,0 +1,5 @@
import pandas as pd
import matplotlib.pyplot as plt
freq = pd.read_csv('frequency.csv', sep=',')
freq['count'].plot()
plt.show()
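The plot above uses the row index for the x-axis. If frequency.csv also carries a date column (an assumption; the original only requires a count column), parsing it as the index gives labelled dates instead:

freq = pd.read_csv('frequency.csv', sep=',', parse_dates=[0], index_col=0)
freq['count'].plot()
plt.show()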
@@ -0,0 +1,61 @@
from lxml import etree
from cStringIO import StringIO
import pycurl
import pandas as pd
import datetime

# Build the list of hourly time windows to query.
start = datetime.datetime(2014, 9, 19)
finish = datetime.datetime(2014, 11, 2)
delta = datetime.timedelta(hours=1)
times = []
while start <= finish:
    times.append(start)
    start += delta

for start in times:
    print start.strftime("%Y-%m-%dT%H:%M:%SZ")
    end = start + delta
    # Assemble the SOAP getFlows request for this one-hour window.
    request1 = '<?xml version="1.0" encoding="UTF-8"?><soapenc:Envelope xmlns:soapenc="http://schemas.xmlsoap.org/soap/envelope/"><soapenc:Body><getFlows><flow-filter max-rows="100000" domain-id="133" include-interface-data="false">'
    request2 = '<date-selection><time-range-selection start="' + start.strftime("%Y-%m-%dT%H:%M:%SZ") + '" end="' + end.strftime("%Y-%m-%dT%H:%M:%SZ") + '" /></date-selection>'
    request3 = '<applications>175,53</applications>'
    request4 = '</flow-filter></getFlows></soapenc:Body></soapenc:Envelope>'
    request = request1 + request2 + request3 + request4
    buffer = StringIO()
    co = pycurl.Curl()
    co.setopt(co.UNRESTRICTED_AUTH, 1)
    co.setopt(co.URL, "https://lancope.example.com/smc/swsService/flows")
    co.setopt(co.POST, 1)
    co.setopt(co.INFILESIZE, len(request) + 1)
    co.setopt(co.WRITEFUNCTION, buffer.write)
    co.setopt(co.POSTFIELDS, request)
    co.setopt(co.SSL_VERIFYPEER, 0L)
    co.setopt(co.SSL_VERIFYHOST, 0L)
    co.setopt(co.USERPWD, "username:password")
    try:
        co.perform()
    except pycurl.error:
        print "POST failed"
        exit(1)

    co.close()
    out = buffer.getvalue()
    buffer.close()
    # Flatten each <flow> element (client endpoint, server endpoint, timing) into a tuple.
    doc = etree.fromstring(out)
    netflows = [('client', 'clientHostName', 'clientPort', 'clientPackets', 'clientBytes', 'server', 'serverHostName', 'serverPort', 'serverPackets', 'serverBytes', 'startTime', 'lastTime', 'activeDuration')]
    for elem in doc.getiterator('{http://www.lancope.com/sws/sws-service}flow'):
        startTime = elem.get('start-time')
        lastTime = elem.get('last-time')
        activeDuration = elem.get('active-duration')
        client = elem[0].get('ip-address')
        clientHostName = elem[0].get('host-name')
        clientPort = elem[0].get('port')
        clientPackets = elem[0].get('packets')
        clientBytes = elem[0].get('bytes')
        server = elem[1].get('ip-address')
        serverHostName = elem[1].get('host-name')
        serverPort = elem[1].get('port')
        serverPackets = elem[1].get('packets')
        serverBytes = elem[1].get('bytes')
        netflows.append((client, clientHostName, clientPort, clientPackets, clientBytes, server, serverHostName, serverPort, serverPackets, serverBytes, startTime, lastTime, activeDuration))

    pd.DataFrame(netflows).to_csv(start.strftime("%Y-%m-%dT%H%M%SZ") + end.strftime("%Y-%m-%dT%H%M%SZ") + '.csv')
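Each window ends up in its own CSV named by its start and end timestamps, so a follow-up pass can stitch the hourly files back into a single frame. A sketch (assuming the CSVs sit in the working directory and nothing else matches the filename pattern):

import glob
import pandas as pd

# header=1 skips the numeric column row that to_csv wrote; the field names are on the second line.
frames = [pd.read_csv(f, index_col=0, header=1) for f in sorted(glob.glob('*Z*Z.csv'))]
flows = pd.concat(frames, ignore_index=True)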
@@ -0,0 +1,33 @@
import shapely.geometry
import matplotlib.pyplot as plt
import fiona.collection
import descartes

places = {'Los Angeles': (34.0204989,-118.4117325), 'Phoenix': (33.6054149,-112.125051), 'Albuquerque': (35.0824099,-106.6764794)}

# places stores (lat, lon); the plot wants (x, y) = (lon, lat).
path = [(x, y) for y, x in places.values()]
ls = shapely.geometry.LineString(path)

with fiona.collection("tl_2014_us_state/tl_2014_us_state.shp") as features:
    states = [shapely.geometry.shape(f['geometry']) for f in features]

fig = plt.figure(figsize=(8,5), dpi=180)
ax = fig.add_subplot(111)
ax.axis([-125, -65, 25, 50])
ax.axis('off')

ax.plot(*ls.xy, color='#FFFFFF')

# Draw each state polygon; wrap bare Polygons so MultiPolygons and Polygons take the same path.
for state in states:
    if state.geom_type == 'Polygon':
        state = [state]
    for poly in state:
        poly_patch = descartes.PolygonPatch(poly, fc='#6699cc', ec='#000000')
        ax.add_patch(poly_patch)

# Mark each stop with a 1-degree buffer circle.
for x, y in path:
    buffered = shapely.geometry.Point(x, y).buffer(1)
    ax.add_patch(descartes.PolygonPatch(buffered, fc='#EEEEEE', ec='#000000'))

fig.show()
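The buffered circles mark each stop but nothing names them; matplotlib's ax.text can label each place directly. A small optional addition (not in the original, reusing the same places dict):

for name, (lat, lon) in places.items():
    ax.text(lon, lat + 1, name, ha='center', color='#000000')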
@@ -0,0 +1,8 @@
import gpxpy

gpx_file = open("file", 'r')
gpx = gpxpy.parse(gpx_file)

points = []
for point in gpx.tracks[0].segments[0].points:
    points.append((point.latitude, point.longitude, point.elevation, point.time))
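gpxpy also exposes aggregate helpers on the parsed object, so summary statistics do not have to be recomputed from the raw point list. A small follow-up sketch (assuming the GPX file holds at least one track with one segment):

segment = gpx.tracks[0].segments[0]
print segment.length_2d()    # flat distance along the segment, in meters
print gpx.get_duration()     # total duration of the file, in seconds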
@@ -0,0 +1,81 @@
import overpy
import numpy
from matplotlib import pyplot
from shapely.geometry import LineString
from shapely.geometry import shape
from shapely.ops import polygonize_full
from descartes import PolygonPatch
from fiona import collection
from geopandas import GeoSeries
from geopandas import GeoDataFrame

api = overpy.Overpass()

# Fetch the county boundary relation and the primary highways inside a bounding box.
countyRelation = api.query("rel(396479);(._;>;);out;")
primaryHighwaysInBoundingBox = api.query("way(33.5277,-119.3500,34.9895,-117.1115)[highway=primary];(._;>);out;")

# Keep only the boundary ways that lie entirely above the islands (lat >= 33.6078).
countyWaysAboveIslands = []
for way in countyRelation.ways:
    above = 1
    for node in way.nodes:
        if node.lat < 33.6078:
            above = 0
    if above:
        countyWaysAboveIslands.append(way)

countyLineStrings = []
for way in countyWaysAboveIslands:
    lineString = []
    for node in way.nodes:
        lineString.append((node.lat, node.lon))
    countyLineStrings.append(LineString(lineString))

polygons, dangles, cuts, invalids = polygonize_full(countyLineStrings)
countyPolygon = polygons.geoms[0]

# Keep only the highways whose geometry falls entirely inside the county polygon.
highwayLineStrings = []
for way in primaryHighwaysInBoundingBox.ways:
    line = []
    for node in way.nodes:
        line.append((node.lat, node.lon))
    wayLineString = LineString(line)
    if countyPolygon.contains(wayLineString): highwayLineStrings.append(wayLineString)

fig = pyplot.figure(figsize=(100,100))
ax = fig.add_subplot(111)

for line in highwayLineStrings:
    x, y = line.xy
    ax.plot(x, y, color='#999999', linewidth=1, zorder=1)

patch = PolygonPatch(countyPolygon, fc='#6699cc', ec='#6699cc', alpha=0.5, zorder=2)
ax.add_patch(patch)

fig.savefig('test.png')

# debug info (scratch notes; result, streetsInBB, aboveIslands, gray, and features are not defined above)
# from shapely.geometry import MultiLineString
# test = MultiLineString(highwayLineStrings)
# patch = PolygonPatch(countyPolygon, fc='#6699cc', ec='#6699cc', alpha=0.5, zorder=2)
# ax.add_patch(patch)
#
# object.__dict__
# result.nodes
# result.nodes[0].lat
# result.ways
# result.ways[0]._node_ids[0]
# result.relations
# result.relations[0].members[0].ref
#
# len(streetsInBB.ways[0].get_nodes(resolve_missing=True))
#
# for way in aboveIslands:
#     gray = gray + .025
#     for node in way.nodes:
#         ax.scatter(node.lon, node.lat, color=[gray,gray,gray], s=100, zorder=1)
#
# states = [shapely.geometry.shape(f['geometry']) for f in features]
#
# http://overpass.osm.rambler.ru/cgi/interpreter?data=%5Bout:json%5D;relation(396479);out;
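The countyPolygon.contains(...) test above runs once per candidate highway, which gets slow as the bounding box grows. shapely's prepared geometries are one way to speed up repeated containment checks against the same polygon; a sketch using the same variables as above:

from shapely.prepared import prep

preparedCounty = prep(countyPolygon)
highwayLineStrings = []
for way in primaryHighwaysInBoundingBox.ways:
    wayLineString = LineString([(node.lat, node.lon) for node in way.nodes])
    if preparedCounty.contains(wayLineString):
        highwayLineStrings.append(wayLineString)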
@@ -0,0 +1,40 @@
import numpy as np
# from pylab import *
import matplotlib.pyplot as plot
import math

# Three rough risk-vs-cost curves drawn through fixed points.
top = np.array([[0,1,2],[2,2,1]])
mid = np.array([[0,1,2],[2,1,0]])
bot = np.array([[0,1,2],[2,0,0]])

plot.plot(top[0], top[1])
plot.plot(mid[0], mid[1])
plot.plot(bot[0], bot[1])

# The same idea as smooth curves over t.
t = np.arange(0.0, 10.0, 0.1)
top2 = 10 - np.arange(0.0, 1.0, 0.01)
top = 10 - (.3*t)**2
mid = -t + 10
bot = 10 - np.sqrt(t)
bot2 = 1/t

plot.plot(t, top2)
plot.plot(t, top)
plot.plot(t, mid)
plot.plot(t, bot)
plot.plot(t, bot2)

plot.scatter(2, 6)
plot.scatter(2, 5)
plot.scatter(9, 1)

plot.xlabel('Cost')
plot.ylabel('Risk')
plot.title('Risk vs. Cost')
plot.grid(True)
plot.show()

# Risk = Money * Time
# Time = constant * Money
# Outsource Time = constant1 * Time + constant2 * Money
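Taking the closing notes literally gives one concrete family of curves: if Time = k * Money, then Risk = Money * Time = k * Money**2. An illustrative sketch (k is an arbitrary assumed constant, not from the original):

money = np.arange(0.0, 10.0, 0.1)
k = 0.1                       # assumed proportionality constant
risk = money * (k * money)    # Risk = Money * Time with Time = k * Money
plot.plot(money, risk)
plot.show()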