import pickle
from collections import Counter

import pandas as pd
import tensorflow as tf
from scipy.stats import entropy
from tensorflow.keras.models import load_model

from Feature import extract_url_features


def calculate_url_entropy(url: str) -> float:
    """Return the Shannon entropy (bits per character) of *url*.

    Returns 0.0 for an empty string — the original version divided by
    ``len(url)`` and raised ZeroDivisionError on empty input.
    """
    if not url:
        return 0.0
    counter = Counter(url)
    probabilities = [count / len(url) for count in counter.values()]
    return entropy(probabilities, base=2)


@tf.function(reduce_retracing=True)
def predict_with_model(model, input_data):
    """Forward pass through *model*; reduce_retracing limits tf.function
    recompilation when input shapes vary between calls."""
    return model(input_data)


def main() -> None:
    """Load the scaler and model, read a URL from stdin, print the verdict."""
    # Load the fitted scaler (pickle) and the trained Keras model.
    # NOTE(review): pickle.load on an untrusted file executes arbitrary code —
    # only ship scaler.pkl from a trusted source.
    with open("scaler.pkl", "rb") as f:
        scaler = pickle.load(f)
    model = load_model("best_model.h5")

    # Read the URL to classify.
    url = input("URL์ž…๋ ฅ : ")

    # Extract features via Feature.py, then add the two locally computed ones.
    features = extract_url_features(url)
    features['url_length'] = len(url)
    features['url_entropy'] = calculate_url_entropy(url)

    # Build a one-row DataFrame and order columns exactly as the scaler
    # was fitted (scaler.feature_names_in_ records the training order).
    input_df = pd.DataFrame([features])
    expected_columns = list(scaler.feature_names_in_)
    input_df = input_df[expected_columns]

    # Scale, predict, and pull out a plain Python float score.
    input_scaled = scaler.transform(input_df)
    prediction = predict_with_model(model, input_scaled)
    score = float(prediction.numpy()[0][0])

    # Binary verdict: score above threshold → malicious, else benign.
    threshold = 0.5
    if score > threshold:
        print(f"์•…์„ฑ (์•…์„ฑ์ผ ํ™•๋ฅ : {score:.4f})")
    else:
        print(f"์ •์ƒ (์ •์ƒ์ผ ํ™•๋ฅ : {1 - score:.4f})")


if __name__ == "__main__":
    main()