diff --git a/docker-compose/InstallDockerComposeOnAmaonLinux.txt b/docker-compose/InstallDockerComposeOnAmaonLinux.txt
new file mode 100644
index 0000000..4458256
--- /dev/null
+++ b/docker-compose/InstallDockerComposeOnAmaonLinux.txt
@@ -0,0 +1,2 @@
+sudo curl -L --fail https://raw.githubusercontent.com/linuxserver/docker-docker-compose/master/run.sh -o /usr/local/bin/docker-compose
+sudo chmod +x /usr/local/bin/docker-compose
\ No newline at end of file
diff --git a/dockerfiles/sampleToDockerize/app.py b/dockerfiles/sampleToDockerize/app.py
new file mode 100644
index 0000000..60fdf75
--- /dev/null
+++ b/dockerfiles/sampleToDockerize/app.py
@@ -0,0 +1,24 @@
+from flask import Flask, jsonify, request
+from utilities import predict_pipeline
+
+app = Flask(__name__)
+
+
+@app.post('/predict')
+def predict():
+    data = request.json
+    try:
+        sample = data['text']
+    except KeyError:
+        return jsonify({'error': 'No text sent'})
+
+    sample = [sample]
+    predictions = predict_pipeline(sample)
+    try:
+        result = jsonify(predictions[0])
+    except TypeError as e:
+        result = jsonify({'error': str(e)})
+    return result
+
+if __name__ == '__main__':
+    app.run(host='0.0.0.0', debug=True)
\ No newline at end of file
diff --git a/dockerfiles/sampleToDockerize/models/pipeline.pickle b/dockerfiles/sampleToDockerize/models/pipeline.pickle
new file mode 100644
index 0000000..0f4d7f7
Binary files /dev/null and b/dockerfiles/sampleToDockerize/models/pipeline.pickle differ
diff --git a/dockerfiles/sampleToDockerize/requirements.txt b/dockerfiles/sampleToDockerize/requirements.txt
new file mode 100644
index 0000000..5d0d21c
--- /dev/null
+++ b/dockerfiles/sampleToDockerize/requirements.txt
@@ -0,0 +1,4 @@
+flask>=2.0.0
+scikit-learn==1.0.1
+nltk==3.6.6
+# Also run: python -c "import nltk; nltk.download('omw-1.4'); nltk.download('wordnet')"
\ No newline at end of file
diff --git a/dockerfiles/sampleToDockerize/utilities.py b/dockerfiles/sampleToDockerize/utilities.py
new file mode 100644
index 0000000..0b34b0c
--- /dev/null
+++ b/dockerfiles/sampleToDockerize/utilities.py
@@ -0,0 +1,108 @@
+import re
+import pickle
+
+# nltk
+from nltk.stem import WordNetLemmatizer
+
+
+lemmatizer = WordNetLemmatizer()
+# Groups together inflected forms of a word ("better" -> "good").
+
+with open('models/pipeline.pickle', 'rb') as f:
+    loaded_pipe = pickle.load(f)
+
+
+def predict_pipeline(text):
+    return predict(loaded_pipe, text)
+
+
+# Dictionary mapping all emojis to their meanings.
+emojis = {':)': 'smile', ':-)': 'smile', ';d': 'wink', ':-E': 'vampire', ':(': 'sad',
+          ':-(': 'sad', ':-<': 'sad', ':P': 'raspberry', ':O': 'surprised',
+          ':-@': 'shocked', ':@': 'shocked', ':-$': 'confused', ':\\': 'annoyed',
+          ':#': 'mute', ':X': 'mute', ':^)': 'smile', ':-&': 'confused', '$_$': 'greedy',
+          '@@': 'eyeroll', ':-!': 'confused', ':-D': 'smile', ':-0': 'yell', 'O.o': 'confused',
+          '<(-_-)>': 'robot', 'd[-_-]b': 'dj', ":'-)": 'sadsmile', ';)': 'wink',
+          ';-)': 'wink', 'O:-)': 'angel', 'O*-)': 'angel', '(:-D': 'gossip', '=^.^=': 'cat'}
+
+# List of all English stopwords.
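+# Hard-coded so the NLTK stopwords corpus does not have to be downloaded at run time.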
+stopwords = ['a', 'about', 'above', 'after', 'again', 'ain', 'all', 'am', 'an',
+             'and', 'any', 'are', 'as', 'at', 'be', 'because', 'been', 'before',
+             'being', 'below', 'between', 'both', 'by', 'can', 'd', 'did', 'do',
+             'does', 'doing', 'down', 'during', 'each', 'few', 'for', 'from',
+             'further', 'had', 'has', 'have', 'having', 'he', 'her', 'here',
+             'hers', 'herself', 'him', 'himself', 'his', 'how', 'i', 'if', 'in',
+             'into', 'is', 'it', 'its', 'itself', 'just', 'll', 'm', 'ma',
+             'me', 'more', 'most', 'my', 'myself', 'now', 'o', 'of', 'on', 'once',
+             'only', 'or', 'other', 'our', 'ours', 'ourselves', 'out', 'own', 're',
+             's', 'same', 'she', "shes", 'should', "shouldve", 'so', 'some', 'such',
+             't', 'than', 'that', "thatll", 'the', 'their', 'theirs', 'them',
+             'themselves', 'then', 'there', 'these', 'they', 'this', 'those',
+             'through', 'to', 'too', 'under', 'until', 'up', 've', 'very', 'was',
+             'we', 'were', 'what', 'when', 'where', 'which', 'while', 'who', 'whom',
+             'why', 'will', 'with', 'won', 'y', 'you', "youd", "youll", "youre",
+             "youve", 'your', 'yours', 'yourself', 'yourselves']
+
+
+def preprocess(textdata):
+    processed_texts = []
+
+    # Defining regex patterns.
+    url_pattern = r"((http://)[^ ]*|(https://)[^ ]*|( www\.)[^ ]*)"
+    user_pattern = r'@[^\s]+'
+    alpha_pattern = r"[^a-zA-Z0-9]"
+    sequence_pattern = r"(.)\1\1+"
+    seq_replace_pattern = r"\1\1"
+
+    for tweet in textdata:
+        tweet = tweet.lower()
+
+        # Replace all URLs with ' URL'.
+        tweet = re.sub(url_pattern, ' URL', tweet)
+        # Replace all emojis with 'EMOJI' plus their meaning.
+        for emoji in emojis.keys():
+            tweet = tweet.replace(emoji, "EMOJI" + emojis[emoji])
+        # Replace @USERNAME with ' USER'.
+        tweet = re.sub(user_pattern, ' USER', tweet)
+        # Replace every character that is not a letter or a digit.
+        tweet = re.sub(alpha_pattern, " ", tweet)
+        # Replace 3 or more consecutive letters with 2 letters.
+        tweet = re.sub(sequence_pattern, seq_replace_pattern, tweet)
+
+        preprocessed_words = []
+        for word in tweet.split():
+            # Skip stopwords and single-character tokens.
+            if len(word) > 1 and word not in stopwords:
+                # Lemmatize the word.
+                word = lemmatizer.lemmatize(word)
+                preprocessed_words.append(word)
+
+        processed_texts.append(' '.join(preprocessed_words))
+
+    return processed_texts
+
+
+def predict(model, text):
+    # Predict the sentiment of each input text.
+    preprocessed_text = preprocess(text)
+    predictions = model.predict(preprocessed_text)
+
+    pred_to_label = {0: 'Negative', 1: 'Positive'}
+
+    # Pair each text with its prediction and label.
+    data = []
+    for t, pred in zip(text, predictions):
+        data.append({'text': t, 'pred': int(pred), 'label': pred_to_label[pred]})
+
+    return data
+
+
+if __name__ == "__main__":
+    # Text to classify should be in a list.
+    text = ["I hate twitter",
+            "May the Force be with you.",
+            "Mr. Stark, I don't feel so good"]
+
+    predictions = predict_pipeline(text)
+    print(predictions)
\ No newline at end of file
diff --git a/kmaster/Kubernetes1-24-1-Installation.txt b/kmaster/Kubernetes1-24-1-Installation.txt
index a123c88..b8657ab 100644
--- a/kmaster/Kubernetes1-24-1-Installation.txt
+++ b/kmaster/Kubernetes1-24-1-Installation.txt
@@ -81,7 +81,7 @@ sudo sh get-docker.sh
 ls /etc/docker
 #check if the above directory exists - else create it
 sudo mkdir -p /etc/systemd/system/docker.service.d
-13. Update daemon jsonĀ 
+13. Update daemon json
 sudo tee /etc/docker/daemon.json <
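
A quick smoke test of the new sampleToDockerize service: run app.py (it binds to 0.0.0.0 and serves on Flask's default port 5000) and POST a JSON body with a 'text' field to /predict. The sketch below is illustrative only; the localhost:5000 address is an assumption, and the requests package is an extra dependency not listed in requirements.txt.

    # Minimal client sketch for app.py's /predict route.
    # ASSUMPTION: the server is reachable at localhost:5000 (Flask's default port).
    # ASSUMPTION: the 'requests' package is installed (not in requirements.txt).
    import requests

    resp = requests.post('http://localhost:5000/predict',
                         json={'text': 'May the Force be with you.'})

    # app.py returns the first prediction as JSON, e.g.
    # {'label': 'Positive', 'pred': 1, 'text': 'May the Force be with you.'}
    print(resp.json())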