From 45528ad8e803d8990b5249889eea5c4d4c2480fc Mon Sep 17 00:00:00 2001
From: p3prsml7a <2461421302@qq.com>
Date: Wed, 31 May 2023 21:36:48 +0800
Subject: [PATCH] ADD file via upload

---
 20407104-邓华锋-计科2001.html | 14810 +++++++++++++++++++++++++++
 1 file changed, 14810 insertions(+)
 create mode 100644 20407104-邓华锋-计科2001.html

diff --git a/20407104-邓华锋-计科2001.html b/20407104-邓华锋-计科2001.html
new file mode 100644
index 0000000..383a88f
--- /dev/null
+++ b/20407104-邓华锋-计科2001.html
@@ -0,0 +1,14810 @@
+# your code
+def Factorial(n):
+    # base case: stop the recursion at n == 1
+    if n == 1:
+        return 1
+    return n*Factorial(n-1)
+Num = 0
+for n in range(1,21):
+    res = Factorial(n)
+    Num += res
+print(Num)
+
+2561327494111820313
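+As a cross-check on the recursive version above, a minimal sketch that computes the same sum with the standard library's math.factorial:
+
+import math
+
+# sum of n! for n = 1..20 using the built-in factorial
+total = sum(math.factorial(n) for n in range(1, 21))
+print(total)  # should match the recursive result printed above
+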
# your code
+s=[9,7,8,3,2,1,55,6]
+print(len(s))
+print(max(s))
+print(min(s))
+s.append(10)
+s.pop(6)
+
+8
+55
+1
+55
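+For clarity, a small sketch (same list as above, not part of the original cell) showing exactly what append(10) and pop(6) do:
+
+s = [9, 7, 8, 3, 2, 1, 55, 6]
+s.append(10)        # s is now [9, 7, 8, 3, 2, 1, 55, 6, 10]
+removed = s.pop(6)  # removes the element at index 6, which is 55
+print(removed)      # 55
+print(s)            # [9, 7, 8, 3, 2, 1, 6, 10]
+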
+TTTTTx
+TTTTxx
+TTTxxx
+TTxxxx
+Txxxxx
+
+# your code
+for i in range(0,5):
+    # print 5-i letters "T" ...
+    for t in range(0,5-i):
+        print("T",end="")
+    # ... followed by i+1 letters "x"
+    for x in range(5-i,6):
+        print("x",end="")
+    print()
+
+TTTTTx
+TTTTxx
+TTTxxx
+TTxxxx
+Txxxxx
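+An equivalent and arguably more concise way to print the same pattern, a minimal sketch using string repetition instead of the inner loops:
+
+for i in range(5):
+    # 5-i copies of "T" followed by i+1 copies of "x"
+    print("T" * (5 - i) + "x" * (i + 1))
+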
# your code
+def Add(a,b):
+    return a+b
+def Sub(a,b):
+    return a-b
+def Mul(a,b):
+    return a*b
+def Div(a,b):
+    # guard against division by zero
+    if b == 0:
+        print("Error!")
+        return
+    return a/b
+# simple menu loop: '0' quits, '1'-'4' select add/sub/mul/div
+while True:
+    Choice = input("Choice:")
+    if Choice == '0':
+        break
+    a = int(input("a:"))
+    b = int(input("b:"))
+    if Choice == '1':
+        print(Add(a,b))
+    elif Choice == '2':
+        print(Sub(a,b))
+    elif Choice == '3':
+        print(Mul(a,b))
+    elif Choice == '4':
+        print(Div(a,b))
+
+Choice:1
+a:6
+b:3
+9
+Choice:4
+a:2
+b:1
+2.0
+Choice:0
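+The chained elif dispatch above could also be written as a dictionary lookup; a minimal sketch (not part of the original cell) using the standard operator module, with the same prompts and the same '0'-to-quit convention:
+
+import operator
+
+# map menu choices to the corresponding binary operations
+ops = {'1': operator.add, '2': operator.sub, '3': operator.mul, '4': operator.truediv}
+
+while True:
+    choice = input("Choice:")
+    if choice == '0':
+        break
+    a = int(input("a:"))
+    b = int(input("b:"))
+    if choice == '4' and b == 0:
+        print("Error!")  # keep the division-by-zero guard
+    elif choice in ops:
+        print(ops[choice](a, b))
+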
# your code
+class Student:
+    def __init__(self,name,age,*course):
+        self.name = name
+        self.age = age
+        # *course packs its arguments into a tuple, so passing a list
+        # yields a nested structure like ([69, 88, 100],)
+        self.course = course
+    def get_name(self):
+        return self.name
+    def get_age(self):
+        return self.age
+    def get_course(self):
+        # inner max picks the (only) list out of the tuple,
+        # outer max finds the highest score in that list
+        return max(max(self.course))
+st=Student('zhangming',20,[69,88,100])
+print('Student name:',st.get_name(),'Student age:',st.get_age(),'Highest score:',st.get_course())
+
+Student name: zhangming Student age: 20 Highest score: 100
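+Because *course packs its arguments into a tuple, the score list above ends up nested one level deep, which is why get_course needs max(max(...)). A minimal alternative sketch (the class name StudentAlt is hypothetical, not part of the assignment) that takes the scores as a plain list instead:
+
+class StudentAlt:
+    def __init__(self, name, age, course):
+        self.name = name
+        self.age = age
+        self.course = course      # e.g. [69, 88, 100]
+    def get_course(self):
+        return max(self.course)   # no nested max needed
+
+st2 = StudentAlt('zhangming', 20, [69, 88, 100])
+print(st2.get_course())           # 100
+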
+
+| X     | Y   | X    | Y   |
+|-------|-----|------|-----|
+| -3.00 | 4   | 0.15 | 255 |
+| -2.50 | 12  | 0.75 | 170 |
+| -1.75 | 50  | 1.25 | 100 |
+| -1.15 | 120 | 1.85 | 20  |
+| -0.50 | 205 | 2.45 | 14  |
# your code
+import matplotlib.pyplot as plt
+x = ['-3.00','-2.50','-1.75','-1.15','-0.50','0.15','0.75','1.25','1.85','2.45']
+y = [4,12,50,120,205,255,170,100,20,14]
+plt.bar(x,y)
+plt.show()
+
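+A slightly fuller sketch of the same bar chart; the axis labels and title are assumptions taken from the table headers and are not part of the original cell:
+
+import matplotlib.pyplot as plt
+
+x = ['-3.00','-2.50','-1.75','-1.15','-0.50','0.15','0.75','1.25','1.85','2.45']
+y = [4,12,50,120,205,255,170,100,20,14]
+plt.bar(x, y)
+plt.xlabel('X')
+plt.ylabel('Y')
+plt.title('Y versus X')
+plt.show()
+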
+Note: train : test = 8:2; use the last two digits of your student ID as the random seed (e.g. if the last two digits are 01, use random_state=1; if they are 34, use random_state=34). For the final result it is enough to print the w and b coefficients of each regression.
+
+| No. | X1 | X2 | X3 | X4 | Y     |
+|-----|----|----|----|----|-------|
+| 1   | 7  | 26 | 6  | 60 | 78.5  |
+| 2   | 1  | 29 | 15 | 52 | 74.3  |
+| 3   | 11 | 56 | 8  | 20 | 104.3 |
+| 4   | 11 | 31 | 8  | 47 | 87.6  |
+| 5   | 7  | 52 | 6  | 33 | 95.9  |
+| 6   | 11 | 55 | 9  | 22 | 109.2 |
+| 7   | 3  | 71 | 17 | 6  | 102.7 |
+| 8   | 1  | 31 | 22 | 44 | 72.5  |
+| 9   | 2  | 54 | 18 | 22 | 93.1  |
+| 10  | 21 | 47 | 4  | 26 | 115.9 |
+| 11  | 1  | 40 | 23 | 34 | 83.8  |
+| 12  | 11 | 66 | 9  | 12 | 113.3 |
+| 13  | 10 | 68 | 8  | 12 | 109.4 |
# your code
+import pandas as pd
+from sklearn.model_selection import train_test_split
+from sklearn.linear_model import LinearRegression, Ridge, Lasso
+
+# build a data frame from the 13 observations in the table above
+data = pd.DataFrame({
+    'X1': [7, 1, 11, 11, 7, 11, 3, 1, 2, 21, 1, 11, 10],
+    'X2': [26, 29, 56, 31, 52, 55, 71, 31, 54, 47, 40, 66, 68],
+    'X3': [6, 15, 8, 8, 6, 9, 17, 22, 18, 4, 23, 9, 8],
+    'X4': [60, 52, 20, 47, 33, 22, 6, 44, 22, 26, 34, 12, 12],
+    'Y': [78.5, 74.3, 104.3, 87.6, 95.9, 109.2, 102.7, 72.5, 93.1, 115.9, 83.8, 113.3, 109.4]
+})
+
+# separate the features and the target
+X = data[['X1', 'X2', 'X3', 'X4']]
+y = data['Y']
+
+# split into training and test sets 8:2, random seed = last two digits of the student ID
+X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=4)
+
+# linear regression model
+linear_model = LinearRegression()
+linear_model.fit(X_train, y_train)
+
+# print the w and b coefficients of the linear regression
+print('Linear regression w:', linear_model.coef_)
+print('Linear regression b:', linear_model.intercept_)
+
+# ridge regression model
+ridge_model = Ridge(alpha=1.0)
+ridge_model.fit(X_train, y_train)
+
+# print the w and b coefficients of the ridge regression
+print('Ridge regression w:', ridge_model.coef_)
+print('Ridge regression b:', ridge_model.intercept_)
+
+# Lasso regression model
+lasso_model = Lasso(alpha=1.0)
+lasso_model.fit(X_train, y_train)
+
+# print the w and b coefficients of the Lasso regression
+print('Lasso regression w:', lasso_model.coef_)
+print('Lasso regression b:', lasso_model.intercept_)
+
+Linear regression w: [ 1.29033673  0.47019403 -0.14727956 -0.24781247]
+Linear regression b: 73.3298572021815
+Ridge regression w: [ 1.27046223  0.46668927 -0.1633555  -0.25416358]
+Ridge regression b: 74.05626863164781
+Lasso regression w: [ 1.28626721  0.47702355 -0.12442651 -0.23834803]
+Lasso regression b: 72.44155045106855
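+Beyond printing w and b, a short sketch (reusing the fitted models and the X_test/y_test split from the cell above, not part of the original assignment output) that also reports each model's R² score on the held-out 20% test set:
+
+# R^2 on the held-out test set for each fitted model
+for name, model in [('Linear', linear_model), ('Ridge', ridge_model), ('Lasso', lasso_model)]:
+    print(name, 'test R^2:', model.score(X_test, y_test))
+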
+Note: train : test = 1:1; use the last two digits of your student ID as the random seed (e.g. if the last two digits are 01, use random_state=1; if they are 34, use random_state=34). The final result should output three things: your predictions, the actual results, and the model score.
+
+| No. | Age   | Income | Student | Credit rating | Buys computer |
+|-----|-------|--------|---------|---------------|---------------|
+| 1   | <=30  | high   | no      | fair          | no            |
+| 2   | <=30  | high   | no      | excellent     | no            |
+| 3   | 31-40 | high   | no      | fair          | yes           |
+| 4   | >40   | medium | no      | fair          | yes           |
+| 5   | >40   | low    | yes     | fair          | yes           |
+| 6   | >40   | low    | yes     | excellent     | no            |
+| 7   | 31-40 | low    | yes     | excellent     | yes           |
+| 8   | <=30  | medium | no      | fair          | no            |
+| 9   | <=30  | low    | yes     | fair          | yes           |
+| 10  | >40   | medium | yes     | fair          | yes           |
+| 11  | <=30  | medium | yes     | excellent     | yes           |
+| 12  | 31-40 | medium | no      | excellent     | yes           |
+| 13  | 31-40 | high   | yes     | fair          | yes           |
+| 14  | >40   | medium | no      | excellent     | no            |
# your code
+
+import numpy as np
+import pandas as pd
+from sklearn import metrics
+# import the Gaussian naive Bayes classifier
+from sklearn.naive_bayes import GaussianNB
+from sklearn.model_selection import train_test_split
+
+# feature matrix: one row per sample from the table above, columns are
+# age (<=30=1, 31-40=2, >40=3), income (low=1, medium=2, high=3),
+# student (no=0, yes=1), credit rating (fair=1, excellent=2)
+x = np.array(
+    [
+        [1, 3, 0, 1],
+        [1, 3, 0, 2],
+        [2, 3, 0, 1],
+        [3, 2, 0, 1],
+        [3, 1, 1, 1],
+        [3, 1, 1, 2],
+        [2, 1, 1, 2],
+        [1, 2, 0, 1],
+        [1, 1, 1, 1],
+        [3, 2, 1, 1],
+        [1, 2, 1, 2],
+        [2, 2, 0, 2],
+        [2, 3, 1, 1],
+        [3, 2, 0, 2],
+    ]
+)
+
+# class labels: buys computer (no=0, yes=1)
+y = np.array(
+    [0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0]
+)
+X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.5, random_state=4)
+# fit a Gaussian naive Bayes classifier
+clf = GaussianNB()
+clf.fit(X_train, y_train)
+# evaluate on the test set
+y_predict = clf.predict(X_test)
+score_gnb = metrics.accuracy_score(y_test, y_predict)
+
+print('Whether the user buys a computer:',y_predict)
+print(y_test)
+print(score_gnb)
+
+Whether the user buys a computer: [1 1 1 1 1 0 1]
+[1 1 1 1 1 0 1]
+1.0
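+Since all four attributes are categorical codes rather than continuous measurements, scikit-learn's CategoricalNB is arguably a more natural model than GaussianNB here. A minimal sketch (reusing x, y and train_test_split from the cell above, with the same 1:1 split; min_categories requires scikit-learn >= 0.24):
+
+from sklearn.naive_bayes import CategoricalNB
+
+# same 1:1 split and random seed as the Gaussian model above
+X_train2, X_test2, y_train2, y_test2 = train_test_split(x, y, test_size=0.5, random_state=4)
+
+# min_categories=4 covers every code (0-3) even if some value is absent from the training half
+cnb = CategoricalNB(min_categories=4)
+cnb.fit(X_train2, y_train2)
+print('CategoricalNB test accuracy:', cnb.score(X_test2, y_test2))
+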
+