From 11bf95c4646f14d5eb750a83a03f627caba0f856 Mon Sep 17 00:00:00 2001
From: pq5abxrt4 <2316909265@qq.com>
Date: Tue, 30 May 2023 13:32:53 +0800
Subject: [PATCH] ADD file via upload

---
 20407148-张利红-计科2001.html | 15513 +++++++++++++++++++++++++++
 1 file changed, 15513 insertions(+)
 create mode 100644 20407148-张利红-计科2001.html

diff --git a/20407148-张利红-计科2001.html b/20407148-张利红-计科2001.html
new file mode 100644
index 0000000..2aecdeb
--- /dev/null
+++ b/20407148-张利红-计科2001.html
@@ -0,0 +1,15513 @@
+total = 0
+factorial = 1
+for i in range(1, 21):
+    # compute i!
+    factorial *= i
+    # add i! to the running total
+    total += factorial
+print("Sum:", total)
+
Sum: 2561327494111820313
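As a quick cross-check of the printed sum (an optional sketch, not part of the submitted cell), the same value can be computed with the standard library's math.factorial:

+import math
+
+# cross-check: sum of i! for i = 1..20
+print(sum(math.factorial(i) for i in range(1, 21)))  # 2561327494111820313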
+s = [9, 7, 8, 3, 2, 1, 55, 6]
+# count the elements in the list
+num = len(s)
+print("Number of elements:", num)
+
+# find the largest and smallest values in the list
+max_num = max(s)
+min_num = min(s)
+print("Maximum:", max_num)
+print("Minimum:", min_num)
+
+# append the element 10
+s.append(10)
+# remove the element 55
+s.remove(55)
+
+print("Final list:", s)
+
Number of elements: 8
Maximum: 55
Minimum: 1
Final list: [9, 7, 8, 3, 2, 1, 6, 10]
+TTTTTx
+TTTTxx
+TTTxxx
+TTxxxx
+Txxxxx
+
+for i in range(5):
+    # print 5 - i letters "T"
+    for j in range(5 - i):
+        print("T", end="")
+    # print i + 1 letters "x"
+    for k in range(i + 1):
+        print("x", end="")
+    print("")
+
TTTTTx
TTTTxx
TTTxxx
TTxxxx
Txxxxx
+def add(x, y):
+    return x + y
+def subtract(x, y):
+    return x - y
+def multiply(x, y):
+    return x * y
+def divide(x, y):
+    if y == 0:
+        return "The divisor cannot be 0"
+    else:
+        return x / y
+
+print("Choose an operation:")
+print("1. Addition")
+print("2. Subtraction")
+print("3. Multiplication")
+print("4. Division")
+
+choice = input("Enter your choice (1/2/3/4): ")
+num1 = int(input("Enter the first number: "))
+num2 = int(input("Enter the second number: "))
+
+if choice == '1':
+    print(num1, "+", num2, "=", add(num1, num2))
+elif choice == '2':
+    print(num1, "-", num2, "=", subtract(num1, num2))
+elif choice == '3':
+    print(num1, "*", num2, "=", multiply(num1, num2))
+elif choice == '4':
+    print(num1, "/", num2, "=", divide(num1, num2))
+else:
+    print("Invalid input")
+
Choose an operation:
1. Addition
2. Subtraction
3. Multiplication
4. Division
Enter your choice (1/2/3/4): 2
Enter the first number: 1
Enter the second number: 3
1 - 3 = -2
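As a design note (a sketch only, not the submitted solution), the if/elif chain can be replaced by a dictionary that maps each menu choice to the corresponding function; the operations name below is made up for illustration:

+# dispatch table built from the four functions defined above
+operations = {'1': add, '2': subtract, '3': multiply, '4': divide}
+op = operations.get(choice)
+if op is None:
+    print("Invalid input")
+else:
+    print(num1, op.__name__, num2, "=", op(num1, num2))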
+class Student:
+    def __init__(self, name, age, course):
+        self.name = name
+        self.age = age
+        self.course = course
+
+    def get_name(self):
+        return self.name
+
+    def get_age(self):
+        return self.age
+
+    def get_course(self):
+        # return the highest score in the course list
+        return max(self.course)
+
+st = Student('zhangming', 20, [69, 88, 100])
+print("Student name:", st.get_name())
+print("Student age:", st.get_age())
+print("Highest score:", st.get_course())
+
Student name: zhangming
Student age: 20
Highest score: 100
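If an average score were also wanted, the class could be extended roughly as sketched below; StudentWithAverage and get_average are hypothetical names, not part of the submitted class:

+class StudentWithAverage(Student):
+    def get_average(self):
+        # mean of the scores in the course list
+        return sum(self.course) / len(self.course)
+
+st2 = StudentWithAverage('zhangming', 20, [69, 88, 100])
+print("Average score:", st2.get_average())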
| X     | Y   | X    | Y   |
|-------|-----|------|-----|
| -3.00 | 4   | 0.15 | 255 |
| -2.50 | 12  | 0.75 | 170 |
| -1.75 | 50  | 1.25 | 100 |
| -1.15 | 120 | 1.85 | 20  |
| -0.50 | 205 | 2.45 | 14  |
+import matplotlib.pyplot as plt
+
+x = [-3.00, -2.50, -1.75, -1.15, -0.50, 0.15, 0.75, 1.25, 1.85, 2.45]
+y = [4, 12, 50, 120, 205, 255, 170, 100, 20, 14]
+
+# draw a bar chart of Y against X
+plt.bar(x, y, width=0.2)
+plt.xlabel('X')
+plt.ylabel('Y')
+
+plt.show()
+
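If the chart also needs to be written to disk rather than only displayed, matplotlib's savefig can be called before plt.show(); the file name below is only an example:

+# optional: save the chart to an image file (must come before plt.show())
+plt.savefig('bar_chart.png', dpi=150)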
Note: train : test = 8 : 2, and the random seed is the last two digits of your student ID; for example, if the last two digits are 01 use random_state=1, and if they are 34 use random_state=34. For the final result it is enough to print the w and b coefficients of each regression.
| No. | X1 | X2 | X3 | X4 | Y     |
|-----|----|----|----|----|-------|
| 1   | 7  | 26 | 6  | 60 | 78.5  |
| 2   | 1  | 29 | 15 | 52 | 74.3  |
| 3   | 11 | 56 | 8  | 20 | 104.3 |
| 4   | 11 | 31 | 8  | 47 | 87.6  |
| 5   | 7  | 52 | 6  | 33 | 95.9  |
| 6   | 11 | 55 | 9  | 22 | 109.2 |
| 7   | 3  | 71 | 17 | 6  | 102.7 |
| 8   | 1  | 31 | 22 | 44 | 72.5  |
| 9   | 2  | 54 | 18 | 22 | 93.1  |
| 10  | 21 | 47 | 4  | 26 | 115.9 |
| 11  | 1  | 40 | 23 | 34 | 83.8  |
| 12  | 11 | 66 | 9  | 12 | 113.3 |
| 13  | 10 | 68 | 8  | 12 | 109.4 |
+import numpy as np
+from sklearn.model_selection import train_test_split
+
+data = np.array([[ 7, 26, 6, 60, 78.5],
+ [ 1, 29, 15, 52, 74.3],
+ [11, 56, 8, 20,104.3],
+ [11, 31, 8, 47, 87.6],
+ [ 7, 52, 6, 33, 95.9],
+ [11, 55, 9, 22,109.2],
+ [ 3, 71, 17, 6,102.7],
+ [ 1, 31, 22, 44, 72.5],
+ [ 2, 54, 18, 22, 93.1],
+ [21, 47, 4, 26,115.9],
+ [ 1, 40, 23, 34, 83.8],
+ [11, 66, 9, 12,113.3],
+ [10, 68, 8, 12,109.4]])
+
+X = data[:,0:4]
+y = data[:,4]
+
+X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=48)
+
+# Linear regression
+from sklearn.linear_model import LinearRegression
+
+lr = LinearRegression()
+lr.fit(X_train, y_train)
+
+print('Linear regression:')
+print('w:', lr.coef_)
+print('b:', lr.intercept_)
+
+# Ridge regression
+from sklearn.linear_model import Ridge
+
+ridge = Ridge(alpha=1.0)  # alpha can be varied to look for a better fit
+ridge.fit(X_train, y_train)
+
+print('Ridge regression:')
+print('w:', ridge.coef_)
+print('b:', ridge.intercept_)
+
+# Lasso regression
+from sklearn.linear_model import Lasso
+
+lasso = Lasso(alpha=1.0)  # alpha can be varied to look for a better fit
+lasso.fit(X_train, y_train)
+
+print('Lasso regression:')
+print('w:', lasso.coef_)
+print('b:', lasso.intercept_)
+
Linear regression:
w: [ 1.14735687 0.21239976 -0.32800606 -0.42396249]
b: 92.378230190439
Ridge regression:
w: [ 1.09637836 0.16865872 -0.37832815 -0.46696953]
b: 96.73786398238788
Lasso regression:
w: [ 1.18866611 0.26280478 -0.25349015 -0.37142781]
b: 87.20251353404662
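The note only asks for w and b, but if a quick check of fit quality is wanted, each fitted model exposes an R^2 score on the held-out 20% split (an optional sketch, not part of the required output):

+# optional: R^2 of each fitted model on the test split
+print('Linear regression R^2:', lr.score(X_test, y_test))
+print('Ridge regression R^2:', ridge.score(X_test, y_test))
+print('Lasso regression R^2:', lasso.score(X_test, y_test))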
Note: train : test = 1 : 1, and the random seed is the last two digits of your student ID; for example, if the last two digits are 01 use random_state=1, and if they are 34 use random_state=34. The final result should output three items: your predictions, the actual labels, and the model score.
| No. | Age   | Income | Student | Credit rating | Buys computer |
|-----|-------|--------|---------|---------------|---------------|
| 1   | <=30  | high   | no      | fair          | no            |
| 2   | <=30  | high   | no      | excellent     | no            |
| 3   | 31-40 | high   | no      | fair          | yes           |
| 4   | >40   | medium | no      | fair          | yes           |
| 5   | >40   | low    | yes     | fair          | yes           |
| 6   | >40   | low    | yes     | excellent     | no            |
| 7   | 31-40 | low    | yes     | excellent     | yes           |
| 8   | <=30  | medium | no      | fair          | no            |
| 9   | <=30  | low    | yes     | fair          | yes           |
| 10  | >40   | medium | yes     | fair          | yes           |
| 11  | <=30  | medium | yes     | excellent     | yes           |
| 12  | 31-40 | medium | no      | excellent     | yes           |
| 13  | 31-40 | high   | yes     | fair          | yes           |
| 14  | >40   | medium | no      | excellent     | no            |
+import numpy as np
+import pandas as pd
+from sklearn import metrics
+# import the Gaussian Naive Bayes classifier
+from sklearn.naive_bayes import GaussianNB
+from sklearn.model_selection import train_test_split
+
+x = np.array(
+ [
+ [1, 3, 0, 1, 0],
+ [1, 3, 0, 2, 1],
+ [2, 3, 0, 2, 1],
+ [3, 2, 0, 1, 1],
+ [3, 1, 1, 1, 1],
+ [3, 1, 1, 2, 0],
+ [2, 1, 1, 2, 1],
+ [1, 2, 0, 1, 0],
+ [1, 1, 1, 1, 1],
+ [3, 2, 1, 1, 1],
+ [1, 2, 1, 2, 1],
+ [2, 2, 0, 2, 1],
+ [2, 3, 1, 1, 1],
+ [3, 2, 0, 2, 0],
+ ]
+)
+
+y = np.array(
+ [
+ 0,1,1,1,1,0,1,0,1,1,1,1,1,0
+ ]
+)
+X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.5, random_state=48)
+# use Gaussian Naive Bayes for the computation
+clf = GaussianNB()
+clf.fit(X_train, y_train)
+# evaluate the model
+y_predict = clf.predict(X_test)
+score_gnb = metrics.accuracy_score(y_test, y_predict)
+
+print('Does the user buy a computer:', y_predict)
+print(y_test)
+print(score_gnb)
+
Does the user buy a computer: [0 1 1 1 1 1 0]
[0 1 1 1 1 1 0]
1.0
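Beyond the single accuracy number, a confusion matrix and per-class report give a fuller view of the test-half behaviour; this is an optional sketch that reuses the metrics module already imported above, not part of the required three outputs:

+# optional: confusion matrix and per-class precision/recall on the test half
+print(metrics.confusion_matrix(y_test, y_predict))
+print(metrics.classification_report(y_test, y_predict))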