2017-06-15 52 views
1

I wrote this code: how can I add new data to SQL with it (using Python 2.7)?

import datetime
import pandas as pd
import MySQLdb

# maindatatable is built by the scraping code shown further down
df = pd.DataFrame(maindatatable)
now = datetime.date.today()
df['date'] = now
#df.rows = header
df.to_csv('output.csv', sep=';', encoding='latin-1', index=True)

# load the CSV into MySQL
connection = MySQLdb.connect(host='localhost',
                             user='root',
                             passwd='1234',
                             db='database')
cursor = connection.cursor()
query = """load data local infile 'C:/Python27/output.csv'
into table valami
character set latin1
fields terminated by ';'
lines terminated by '\n'
ignore 1 lines;
"""
cursor.execute(query)
connection.commit()
cursor.close()

I scrape this table from the web every day and want to import it into SQL. How can I do it so that when I run my code each day, the new values are put into this SQL table? (Append?)
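
For what it is worth, LOAD DATA LOCAL INFILE inserts rows into whatever is already in the table, so re-running the script each day already appends that day's data (the date column keeps the days apart). Below is a minimal sketch of that daily import step, reusing the table, credentials and CSV path from the code above; the helper name append_todays_rows is just illustrative, and the local_infile=1 flag is an assumption about how LOCAL INFILE is enabled on the client side:

import MySQLdb

def append_todays_rows(csv_path='C:/Python27/output.csv'):
    # LOAD DATA adds rows to the existing table, so a daily run keeps accumulating records
    connection = MySQLdb.connect(host='localhost', user='root', passwd='1234',
                                 db='database', local_infile=1)
    cursor = connection.cursor()
    cursor.execute("""load data local infile '%s'
        into table valami
        character set latin1
        fields terminated by ';'
        lines terminated by '\n'
        ignore 1 lines""" % csv_path)
    connection.commit()
    cursor.close()
    connection.close()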

The code that produces output.csv:

# -*- coding: utf-8 -*-
import csv
import urllib2
from bs4 import BeautifulSoup

filename = r'output.csv'

resultcsv = open(filename, "wb")
# Python 2's csv.writer has no encoding argument; encode each cell before writing instead
output = csv.writer(resultcsv, delimiter=';', quotechar='"', quoting=csv.QUOTE_NONNUMERIC)
header = [u'Pénznem', u'Devizanév', u'Egység', u'Pénznem_Forintban', u'date']
output.writerow([h.encode('latin-1') for h in header])

def make_soup(url):
    thepage = urllib2.urlopen(url)
    soupdata = BeautifulSoup(thepage, "html.parser")
    return soupdata

def to_2d(l, n):
    return [l[i:i+n] for i in range(0, len(l), n)]

soup = make_soup("https://www.mnb.hu/arfolyamok")

# collect every table cell, then regroup the flat list into rows of four columns
datatable = []
for record in soup.findAll('tr'):
    for data in record.findAll('td'):
        datatable.append(data.text)
maindatatable = to_2d(datatable, 4)

# data.text is unicode, so encode it to match the latin-1 CSV
output.writerows([[cell.encode('latin-1') for cell in row] for row in maindatatable])

resultcsv.close()

+0

Can you show us output.csv? – MishaVacic

+0

Sure, I have edited my question! – tardos93

Answer

3

I tried to solve this in my solution. Take a look at SQLAlchemy. You should create the database (and table) in the MySQL shell, after which you can run the Python code (my version is Python 3 on Ubuntu).
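
If the database itself does not exist yet, a rough sketch of that one-time setup step, assuming the same credentials and database name ('dbase') as the code below:

import MySQLdb

# one-time setup: create the database that the SQLAlchemy engine URL below points at
conn = MySQLdb.connect(host='localhost', user='you', passwd='viktororban')
cur = conn.cursor()
cur.execute("CREATE DATABASE IF NOT EXISTS dbase")
cur.close()
conn.close()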

import csv
import urllib.request
import pandas as pd
import datetime
from sqlalchemy import create_engine
import MySQLdb
from bs4 import BeautifulSoup

filename = 'output.csv'

# also save the scraped table to a CSV file
resultcsv = open(filename, "w")
output = csv.writer(resultcsv, delimiter=';', quotechar='"', quoting=csv.QUOTE_NONNUMERIC)
header = ['Pénznem', 'Devizanév', 'Egység', 'Pénznem_Forintban', 'date']
output.writerow(header)

# fetch and parse the MNB exchange-rate page
with urllib.request.urlopen("https://www.mnb.hu/arfolyamok") as url:
    s = url.read()
    soup = BeautifulSoup(s, 'html.parser')

def to_2d(l, n):
    return [l[i:i+n] for i in range(0, len(l), n)]

# collect every table cell, then regroup into rows of four columns
datatable = []
for record in soup.findAll('tr'):
    for data in record.findAll('td'):
        datatable.append(data.text)
maindatatable = to_2d(datatable, 4)

output.writerows(maindatatable)
resultcsv.close()

# load the rows into a DataFrame and push them into MySQL via SQLAlchemy
df = pd.DataFrame(maindatatable)
print(df)

engine = create_engine("mysql+mysqldb://you:"+'viktororban'+"@localhost/dbase")
df.to_sql(con=engine, name='newtable', if_exists='fail', index=True)
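
One note on the daily runs the question asks about: with if_exists='fail' the second run will stop with an error because newtable already exists. pandas' to_sql also accepts if_exists='append', which simply adds the new rows, so on later runs the last line could instead be (a sketch using the same engine and table name as above):

# on later daily runs, append to the existing table instead of failing
df.to_sql(con=engine, name='newtable', if_exists='append', index=True)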

Of course, you can change the passwd!

+0

Thank you, it is working for me ^^ – tardos93