Python Database Batch Insert, Query, and Update — Sample Code
- 2021-08-17 00:21:47
- OfStack
1. Preface
Recently, while testing a web site, I ran into many features that required test data to be created in batches. For example, one page needed to display 10,000 records for testing; constructing that much data by hand is clearly impractical, so it had to be generated automatically with a Python script. This data construction mainly involves adding rows to a table in batches, adding rows to several related tables in sync, querying qualifying rows from a table in batches, and updating qualifying rows in a table in batches.
2. Adding data
That is, batch adding data to a table.
insert_data.py
import pymysql
import random
import time
from get_userinfo import get_userinfo
from get_info import get_info
from get_tags import get_tags
from get_tuser_id import get_utag
class DatabaseAccess():
    """Batch-insert rows into `codeforge_new`.`cf_user_tag`, pairing randomly
    chosen user ids with randomly chosen tag ids.

    Relies on the project helpers imported at the top of the file:
    get_userinfo(), get_tags(), get_utag().
    """

    def __init__(self):
        # Connection settings; host/database values are placeholders
        # from the original article.
        self.__db_host = "xxxxx"
        self.__db_port = 3307
        self.__db_user = "root"
        self.__db_password = "123456"
        self.__db_database = "xxxxxx"

    def isConnectionOpen(self):
        """Open a new pymysql connection and store it on self.__db."""
        self.__db = pymysql.connect(
            host=self.__db_host,
            port=self.__db_port,
            user=self.__db_user,
            password=self.__db_password,
            database=self.__db_database,
            charset='utf8'
        )

    def linesinsert(self, n, user_id, tags_id, created_at):
        """Insert one cf_user_tag row with id `n` and commit.

        Opens a fresh connection per call, mirroring the original script.
        `created_at` is reused for both created_at and updated_at.
        """
        self.isConnectionOpen()
        # Create a cursor (the original bound it to a misleading name `conn`
        # and declared an unused `global cursor`; both cleaned up here).
        cursor = self.__db.cursor()
        try:
            # FIX: parameterized query instead of str.format — prevents SQL
            # injection and quoting bugs with the interpolated values.
            sql = (
                "INSERT INTO `codeforge_new`.`cf_user_tag`"
                "(`id`, `user_id`, `tag_id`, `created_at`, `updated_at`) "
                "VALUES (%s, %s, %s, %s, %s)"
            )
            cursor.execute(sql, (n, user_id, tags_id, created_at, created_at))
        except Exception as e:
            # Best-effort insert: report and continue, as in the original.
            print(e)
        finally:
            # Close the cursor, then commit and close the connection.
            cursor.close()
            self.__db.commit()
            self.__db.close()

    def get_data(self):
        """Generate and insert rows with ids 0..1000 (note: 1001 rows —
        the original comment said 1000, but range(0, 1001) yields 1001)."""
        for i in range(0, 1001):
            created_at = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
            # Ids of users that already have a tag row; re-queried every pass
            # because linesinsert() may grow that table mid-run.
            tuserids = [row[0] for row in get_utag()]
            # Pick a random user id.
            user_id = random.choice(get_userinfo())[0]
            # NOTE(review): this branch is a no-op — it looks like it was
            # meant to skip or re-pick already-tagged users but never did.
            # Behavior preserved; confirm the intended logic with the author.
            if user_id not in tuserids:
                user_id = user_id
            # Pick a random tag id.
            tags_id = random.choice(get_tags())[0]
            self.linesinsert(i, user_id, tags_id, created_at)
if __name__ == "__main__":
    # Script entry point: build the accessor and run the batch insert.
    database = DatabaseAccess()
    database.get_data()
3. Batch data query
select_data.py
import pymysql
import pandas as pd
import numpy as np
def get_tags():
    """Return the ids of cf_tags rows with 204 <= id <= 298.

    Returns a nested list like [[204], [205], ...] (one inner list per row),
    matching the original DataFrame -> ndarray -> tolist() conversion.
    """
    # Connect to the database: host, port, user, password, db name, charset
    # (placeholder credentials from the original article).
    conn = pymysql.connect(host='xxx.xxx.xxx.xxx', port=3307, user='root',
                           passwd='123456', db='xxxx', charset='utf8')
    try:
        sql = 'select id from cf_tags where id between 204 and 298'
        # Read the result set into a DataFrame, then flatten to a plain list.
        df = pd.read_sql(sql, con=conn)
        return np.array(df).tolist()
    finally:
        # BUG FIX: the original placed conn.close() after `return`, so it
        # never ran and the connection leaked. try/finally guarantees it.
        conn.close()
4. Updating data in batches
select_data.py
import pymysql
import pandas as pd
import numpy as np
def get_tags():
    """Return the ids of cf_tags rows with 204 <= id <= 298.

    Returns a nested list like [[204], [205], ...] (one inner list per row),
    matching the original DataFrame -> ndarray -> tolist() conversion.
    NOTE(review): this listing duplicates the query example verbatim even
    though the section heading promises batch updates — likely a copy-paste
    slip in the article.
    """
    # Connect to the database: host, port, user, password, db name, charset
    # (placeholder credentials from the original article).
    conn = pymysql.connect(host='xxx.xxx.xxx.xxx', port=3307, user='root',
                           passwd='123456', db='xxxx', charset='utf8')
    try:
        sql = 'select id from cf_tags where id between 204 and 298'
        # Read the result set into a DataFrame, then flatten to a plain list.
        df = pd.read_sql(sql, con=conn)
        return np.array(df).tolist()
    finally:
        # BUG FIX: the original placed conn.close() after `return`, so it
        # never ran and the connection leaked. try/finally guarantees it.
        conn.close()
The above covers the details of the sample code for adding, querying, and updating database data with Python. For more on Python database operations, please see the other related articles on this site!