# 🔹 Linear Regression
from sklearn.linear_model import LinearRegression
import numpy as np

# Training data follows y = 2x exactly, so the fit is perfect.
X = np.array([[1], [2], [3], [4]])
y = np.array([2, 4, 6, 8])

model = LinearRegression()
model.fit(X, y)

# Predict for x = 5; the expected value is 10.
# Fix: the next section's "🔹" header was fused onto this line,
# which made the file a SyntaxError — it is now a comment below.
print("Prediction:", model.predict([[5]]))

# 🔹 Multiple Linear Regression
# Two explanatory features per sample; the target follows y = x1 + x2.
X = np.array([[1, 2], [2, 3], [3, 4], [4, 5]])
y = np.array([3, 5, 7, 9])

# fit() returns the estimator itself, so construction and fitting chain.
model = LinearRegression().fit(X, y)
print(model.predict([[5,6]]))
# 🔹 Polynomial Regression
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression

# Expand the current feature matrix X with squared and interaction
# terms up to degree 2, then fit an ordinary linear model on top.
poly = PolynomialFeatures(degree=2)
X_poly = poly.fit_transform(X)

model = LinearRegression().fit(X_poly, y)
# 🔹 Logistic Regression
from sklearn.linear_model import LogisticRegression

# Binary labels: class 0 for small x, class 1 for large x.
X = np.array([[1], [2], [3], [4]])
y = np.array([0, 0, 1, 1])

# Fit and classify the midpoint x = 2.5 (decision-boundary region).
model = LogisticRegression().fit(X, y)
print(model.predict([[2.5]]))
# 🔹 K-Nearest Neighbors
from sklearn.neighbors import KNeighborsClassifier

# Classify by majority vote among the 3 nearest training samples.
model = KNeighborsClassifier(n_neighbors=3).fit(X, y)
print(model.predict([[3]]))
# 🔹 Naive Bayes
from sklearn.naive_bayes import GaussianNB

# Gaussian naive Bayes: one normal distribution per class and feature.
model = GaussianNB().fit(X, y)
print(model.predict([[3]]))
# 🔹 Decision Tree
from sklearn.tree import DecisionTreeClassifier

# A single tree with default settings; it memorises this tiny dataset.
model = DecisionTreeClassifier().fit(X, y)
print(model.predict([[3]]))
# 🔹 Support Vector Machine
from sklearn.svm import SVC

# Linear-kernel SVM: finds the max-margin separating hyperplane.
model = SVC(kernel='linear').fit(X, y)
print(model.predict([[3]]))
# 🔹 K-Means Clustering
from sklearn.cluster import KMeans

# Two well-separated groups, centred near x=1 and x=10.
X = np.array([[1,2], [1,4], [1,0], [10,2], [10,4], [10,0]])

# n_init is set explicitly: its default changed in scikit-learn 1.4 and
# versions 1.2-1.3 emit a FutureWarning without it. random_state makes
# the cluster labelling reproducible between runs.
model = KMeans(n_clusters=2, n_init=10, random_state=0)
model.fit(X)
print(model.labels_)
# 🔹 Hierarchical Clustering
from sklearn.cluster import AgglomerativeClustering

# Bottom-up merging of the closest clusters until two remain.
# fit_predict both fits the model and returns each sample's label.
model = AgglomerativeClustering(n_clusters=2)
labels = model.fit_predict(X)
print(labels)
# 🔹 Apriori Algorithm
from mlxtend.frequent_patterns import apriori, association_rules
import pandas as pd

# One-hot basket data: each row is a transaction, each column an item.
data = {'Milk':[1,0,1,1],
        'Bread':[1,1,1,0],
        'Butter':[0,1,1,1]}
# mlxtend deprecates integer-encoded frames; boolean dtype is the
# supported input format for apriori.
df = pd.DataFrame(data).astype(bool)

# Itemsets present in at least half of the transactions, then rules
# with confidence >= 0.7 derived from them.
frequent = apriori(df, min_support=0.5, use_colnames=True)
rules = association_rules(frequent, metric="confidence", min_threshold=0.7)
# Fix: the next section's "🔹" header was fused onto this line,
# which made the file a SyntaxError — it is now a comment below.
print(rules)

# 🔹 Principal Component Analysis
from sklearn.decomposition import PCA

# Project the data onto its two directions of greatest variance.
pca = PCA(n_components=2)
# fit_transform both learns the components and projects X onto them.
X_reduced = pca.fit_transform(X)
print(X_reduced)
# 🔹 Multi-Layer Perceptron (MLP)
from sklearn.neural_network import MLPClassifier

# Fix: by this point in the script X is the 6-sample clustering matrix
# while y still holds only 4 labels, so the original fit(X, y) raised
# "inconsistent numbers of samples". Give this demo its own data.
X = np.array([[1], [2], [3], [4]])
y = np.array([0, 0, 1, 1])

# max_iter raised from the default 200 so the tiny problem converges
# without a ConvergenceWarning.
model = MLPClassifier(hidden_layer_sizes=(10,), max_iter=2000)
model.fit(X, y)
print(model.predict([[3]]))
# 🔹 Random Forest
from sklearn.ensemble import RandomForestClassifier

# Fix: X and y drifted apart earlier in the script (6 samples vs 4
# labels), so fit(X, y) raised a sample-count ValueError. Redefine a
# consistent training set for this demo.
X = np.array([[1], [2], [3], [4]])
y = np.array([0, 0, 1, 1])

model = RandomForestClassifier()
model.fit(X, y)
print(model.predict([[3]]))
# 🔹 AdaBoost
from sklearn.ensemble import AdaBoostClassifier

# Fix: X and y drifted apart earlier in the script (6 samples vs 4
# labels), so fit(X, y) raised a sample-count ValueError. Redefine a
# consistent training set for this demo.
X = np.array([[1], [2], [3], [4]])
y = np.array([0, 0, 1, 1])

model = AdaBoostClassifier()
model.fit(X, y)
print(model.predict([[3]]))