I am trying to create an S3 bucket using the AWS SDK for Python (Boto3) in PyCharm and I'm getting the following error:
"An error occurred (IllegalLocationConstraintException) when calling the CreateBucket operation: The unspecified location constraint is incompatible for the region specific endpoint this request was sent to."
Here is my code:
import os
import boto3
from botocore.exceptions import ClientError

ACCESS_KEY = 'AWS_ACCESS_KEY_ID'
SECRET_KEY = 'AWS_SECRET_ACCESS_KEY'
PRI_BUCKET_NAME = 'soundcloud2'
TRANSIENT_BUCKET_NAME = 'soundcloud3'


def main():
    """entry point"""
    access = os.getenv(ACCESS_KEY)
    secret = os.getenv(SECRET_KEY)
    s3 = boto3.resource('s3', aws_access_key_id=access, aws_secret_access_key=secret)
    create_bucket(TRANSIENT_BUCKET_NAME, s3)


def create_bucket(name, s3):
    try:
        bucket = s3.create_bucket(Bucket=name)
    except ClientError as ce:
        print('error', ce)


if __name__ == '__main__':
    main()
Try modifying your code as below:
bucket = s3.create_bucket(Bucket=name,
                          CreateBucketConfiguration={
                              'LocationConstraint': 'ap-south-1'})
or whichever region you are using. The one exception is us-east-1: for that region you should omit CreateBucketConfiguration entirely, since it is the default.
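If you don't want to hard-code the region, a minimal sketch (assuming the region comes from your default boto3 session / environment, not from the original post) could look like this:

import boto3
from botocore.exceptions import ClientError


def create_bucket(name, s3):
    # Region of the current boto3 session; fall back to us-east-1 if unset.
    region = boto3.session.Session().region_name or 'us-east-1'
    try:
        if region == 'us-east-1':
            # us-east-1 must not be passed as a LocationConstraint.
            return s3.create_bucket(Bucket=name)
        return s3.create_bucket(
            Bucket=name,
            CreateBucketConfiguration={'LocationConstraint': region})
    except ClientError as ce:
        print('error', ce)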
I use this endpoint (https://developers.google.com/youtube/v3/docs/activities/list) to retrieve the list of a user's activities.
I use my channelId, but I don't see any subscriptions from before autumn 2021. Can anyone explain why? Could you also tell me when this activity type (subscription) was added to the Activities: list request?
Thanks!
See the example code below:
import os
import google_auth_oauthlib.flow
import googleapiclient.discovery
import googleapiclient.errors

scopes = ["https://www.googleapis.com/auth/youtube.readonly"]


def main():
    # Disable OAuthlib's HTTPS verification when running locally.
    # *DO NOT* leave this option enabled in production.
    os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"

    api_service_name = "youtube"
    api_version = "v3"
    client_secrets_file = "YOUR_CLIENT_SECRET_FILE.json"

    # Get credentials and create an API client
    flow = google_auth_oauthlib.flow.InstalledAppFlow.from_client_secrets_file(
        client_secrets_file, scopes)
    credentials = flow.run_console()
    youtube = googleapiclient.discovery.build(
        api_service_name, api_version, credentials=credentials)

    request = youtube.activities().list(
        part="snippet,contentDetails",
        channelId="yourChannelId",  # Note: paste your own channelId
        maxResults=300
    )
    response = request.execute()

    print(response)


if __name__ == "__main__":
    main()
You can try an example request in the panel on the right-hand side of this page:
https://developers.google.com/youtube/v3/docs/activities/list
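Also note that results come back in pages (and, as far as I recall, maxResults is capped at 50 for this endpoint), so if you want a channel's full history you have to follow nextPageToken. A minimal sketch, reusing the youtube client built above:

def list_all_activities(youtube, channel_id):
    """Collect every activity for a channel by following nextPageToken."""
    items = []
    request = youtube.activities().list(
        part="snippet,contentDetails",
        channelId=channel_id,
        maxResults=50)
    while request is not None:
        response = request.execute()
        items.extend(response.get("items", []))
        # list_next() returns None when there are no more pages.
        request = youtube.activities().list_next(request, response)
    return items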
I am making an API with Flask-RESTful, but when I make the POST request
http://127.0.0.1:5000/bot?id_articulo=1&url_articulo=www.wiki.org
I get the message
"message": "The browser (or proxy) sent a request that this server could not understand."
My Python code is
from flask import Flask
from flask_restful import Resource, Api, reqparse
import pandas as pd

app = Flask(__name__)
api = Api(app)


class Bot(Resource):
    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument('id_articulo', required=True, type=int)
        parser.add_argument('url_articulo', required=True, type=str)
        args = parser.parse_args()
        print(args)
        data_articulo = pd.read_csv('articulos.csv')
        print(data_articulo)
        if args['url_articulo'] in list(data_articulo['url']):
            return {
                'mensage': f"El artículo '{args['url_articulo']}' ya existe."
            }, 409
        else:
            nueva_columna = pd.DataFrame({
                'id_articulo': [args['id_articulo']],
                'url': [args['url_articulo']],
            })
            data_articulo = data_articulo.append(nueva_columna, ignore_index=True)
            data_articulo.to_csv('articulos.csv', index=False)
            return {'data': data_articulo.to_dict()}, 200


api.add_resource(Bot, '/bot', methods=['POST'])

if __name__ == '__main__':
    app.run()
Now, I noticed that the error message is thrown only when I am in a virtual environment whose requirements.txt is
aniso8601==9.0.1
click==8.1.3
colorama==0.4.5
Flask==2.1.2
Flask-RESTful==0.3.9
importlib-metadata==4.12.0
itsdangerous==2.1.2
Jinja2==3.1.2
joblib==1.1.0
MarkupSafe==2.1.1
numpy==1.23.1
pandas==1.4.3
python-dateutil==2.8.2
pytz==2022.1
six==1.16.0
Werkzeug==2.1.2
zipp==3.8.0
So far I don't have a clue what is going on, and it makes me think the flask_restful library has issues with virtual environments. I would like to know how to make this work properly in one.
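One detail that may matter here (a guess based on the request shape, not a confirmed diagnosis): the two values are sent as query-string parameters, so telling RequestParser explicitly to read them from the query string keeps it from trying to parse a JSON/form body. A minimal sketch of that change to the parser setup:

# Read the two arguments from the query string (request.args) instead of
# the default locations (JSON body / form data).
parser = reqparse.RequestParser()
parser.add_argument('id_articulo', required=True, type=int, location='args')
parser.add_argument('url_articulo', required=True, type=str, location='args')
args = parser.parse_args()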
I'm a beginner at Python. Below is my Python code for a Telegram bot. It works with XAMPP, but I would like to host the bot in the cloud so that I don't need to start XAMPP's Apache & MySQL every time I want to use the bot. However, it stopped working after I uploaded it to Heroku. How can I fix this? Thank you in advance.
Modified for uploading to Heroku:
import logging
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters
import os
import mysql.connector
from typing import Dict

from telegram import ReplyKeyboardMarkup, Update, ReplyKeyboardRemove
from telegram.ext import (
    Updater,
    CommandHandler,
    MessageHandler,
    Filters,
    ConversationHandler,
    CallbackContext,
)

PORT = int(os.environ.get('PORT', 5000))

# Enable logging
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                    level=logging.INFO)

mydb = mysql.connector.connect(
    host='us-cdbr-east-05.cleardb.net',
    user='b081bd520f9623',
    passwd='557dad71',
    database='heroku_26b1a208f24f1fa')
query = mydb.cursor()

logger = logging.getLogger(__name__)

TOKEN = '5333685233:AAFr4-1nB6_I8ZMdt25Y4zBotHRA9I_qtMI'


# Define a few command handlers. These usually take the two arguments update and
# context. Error handlers also receive the raised TelegramError object in error.
def start(update, context):
    """Send a message when the command /start is issued."""
    update.message.reply_text('Hi! This is start')


def help(update, context):
    """Send a message when the command /help is issued."""
    update.message.reply_text('Help!')


def sql(update, context):
    sql = "SELECT nama_item, jumlah_dalam_kg FROM data_penjualan_harian WHERE nama_item = 'Lemon'"
    query.execute(sql)
    sql_result = query.fetchall()
    pesan_balasan = ''
    for x in sql_result:
        pesan_balasan = pesan_balasan + str(x) + '\n'
    # tidy up the bot's reply
    # remove the quotation marks
    pesan_balasan = pesan_balasan.replace("'", "")
    # remove the parentheses
    pesan_balasan = pesan_balasan.replace("(", "")
    pesan_balasan = pesan_balasan.replace(")", "")
    # remove the commas
    pesan_balasan = pesan_balasan.replace(",", "")
    update.message.reply_text(pesan_balasan)


def main():
    updater = Updater(TOKEN, use_context=True)

    # Get the dispatcher to register handlers
    dp = updater.dispatcher

    # on different commands - answer in Telegram
    dp.add_handler(CommandHandler("start", start))
    dp.add_handler(CommandHandler("help", help))

    # on noncommand i.e message - echo the message on Telegram
    dp.add_handler(MessageHandler(Filters.text, echo))

    # log all errors
    dp.add_error_handler(error)

    # Start the Bot
    updater.start_webhook(listen="0.0.0.0",
                          port=PORT,
                          url_path=TOKEN,
                          webhook_url='https://powerful-lowlands-14039.herokuapp.com/' + TOKEN)

    # Run the bot until you press Ctrl-C or the process receives SIGINT,
    # SIGTERM or SIGABRT. This should be used most of the time, since
    # start_polling() is non-blocking and will stop the bot gracefully.
    updater.idle()


if __name__ == '__main__':
    main()
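One thing worth checking (an assumption on my side, since the failing behaviour isn't shown): on some python-telegram-bot 13.x versions start_webhook does not register the webhook with Telegram by itself, so setting it explicitly right after starting the webhook server is a common workaround. A sketch of that variant of main():

# Sketch: register the webhook explicitly in case start_webhook() does not.
updater.start_webhook(listen="0.0.0.0",
                      port=PORT,
                      url_path=TOKEN)
updater.bot.setWebhook('https://powerful-lowlands-14039.herokuapp.com/' + TOKEN)
updater.idle()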
from dotenv import load_dotenv
import os
from panda import *
from telegram.ext import *
from telegram.update import *

load_dotenv('.env')
Token = os.getenv('TOKEN')
print("The bot connected .....")


# commands handler
# start message
def start_command(update, context):
    update.message.reply_text("Hello am mr panda am here to help you: ")


# help command
def help_command(update, context):
    res = panda.help()
    update.message.reply_text(res)


# message handler
def message_handle(update, context):
    message = str(update.message.text).lower()
    respose = panda.hello(message)
    update.message.reply_text(respose)


# error handler
def error(update, context):
    print(f"Update the context error : {context.error}")


# main function
def main():
    global message
    updater = Updater(Token, use_context=True)
    dp = updater.dispatcher

    # command handlers
    dp.add_handler(CommandHandler("start", start_command))
    dp.add_handler(CommandHandler("help", help_command))

    # message handlers
    dp.add_handler(MessageHandler(Filters.text, message_handle))

    # error handlers
    dp.add_error_handler(error)

    updater.start_polling()
    updater.idle()


main()
This is the code. I am getting the error
Update the context error: local variable 'message' referenced before assignment
I think the error is in the message-handling parts (the message_handle function and the MessageHandler registration in main). I did some searching and referred to the documentation too, but I can't spot the mistake.
If anyone has a solution, that would be great :)
We are trying to migrate data from AWS S3 to GCP Storage. We tried a Transfer Job in GCP and it works fine, but we want to achieve this programmatically with an AWS Lambda, since we have dependencies on AWS.
When I try to import the google.cloud module I get an error (see the Lambda CloudWatch logs).
Here is my code:
import os
import logging
from io import BytesIO  # binary buffer for S3 download_fileobj

import boto3
from google.cloud import storage
# import google-cloud-storage

# Setup logging
LOG = logging.getLogger(__name__)
LOG.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))

GCS_BUCKET_NAME = os.environ['GCS_BUCKET_NAME']
S3 = boto3.client('s3')


def lambda_handler(event, context):
    try:
        l_t_bucketKey = _getKeys(event)

        # Create google client
        storage_client = storage.Client()
        gcs_bucket = storage_client.get_bucket(os.environ['GCS_BUCKET_NAME'])

        LOG.debug('About to copy %d files', len(l_t_bucketKey))
        for bucket, key in l_t_bucketKey:
            try:
                inFileObj = BytesIO()
                S3.download_fileobj(
                    Bucket=bucket,
                    Key=key,
                    Fileobj=inFileObj
                )
                blob = gcs_bucket.blob(key)
                blob.upload_from_file(inFileObj, rewind=True)  # seek(0) before reading file obj
                LOG.info('Copied s3://%s/%s to gcs://%s/%s', bucket, key, GCS_BUCKET_NAME, key)
            except Exception:
                LOG.exception('Error copying file: {k}'.format(k=key))
        return 'SUCCESS'
    except Exception as e:
        LOG.exception("Lambda function failed:")
        return 'ERROR'


def _getKeys(d_event):
    """
    Extracts (bucket, key) from event
    :param d_event: Event dict
    :return: List of tuples (bucket, key)
    """
    l_t_bucketKey = []
    if d_event:
        if 'Records' in d_event and d_event['Records']:
            for d_record in d_event['Records']:
                try:
                    bucket = d_record['s3']['bucket']['name']
                    key = d_record['s3']['object']['key']
                    l_t_bucketKey.append((bucket, key))
                except Exception:
                    LOG.warning('Error extracting bucket and key from event')
    return l_t_bucketKey
I downloaded the google-cloud-storage module from the PyPI website and added it to an AWS Lambda layer. Please point me to the best way to obtain and package this module.
A Google Cloud Storage bucket can be accessed through its S3-compatible (interoperability) API, so you can use it from your Lambda function with plain boto3, without any extra GCP libraries.
source_client = boto3.client(
    's3',
    endpoint_url='https://storage.googleapis.com',
    aws_access_key_id=os.environ['GCP_KEY'],
    aws_secret_access_key=os.environ['GCP_SECRET']
)
To get the access key and secret, go to the Cloud Storage settings -> Interoperability -> Access keys for your user account -> Create a key.
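Putting it together, here is a minimal sketch of the Lambda copy loop under this approach (the environment variable names GCP_KEY, GCP_SECRET, and GCS_BUCKET_NAME, and the S3-event shape, are assumptions on my side):

import os
from io import BytesIO

import boto3

s3 = boto3.client('s3')
# GCS reached via its S3-compatible endpoint, authenticated with HMAC interoperability keys.
gcs = boto3.client(
    's3',
    endpoint_url='https://storage.googleapis.com',
    aws_access_key_id=os.environ['GCP_KEY'],
    aws_secret_access_key=os.environ['GCP_SECRET']
)


def lambda_handler(event, context):
    for record in event.get('Records', []):
        bucket = record['s3']['bucket']['name']
        key = record['s3']['object']['key']

        # Stream the object through memory: download from S3, then upload to GCS.
        buf = BytesIO()
        s3.download_fileobj(Bucket=bucket, Key=key, Fileobj=buf)
        buf.seek(0)
        gcs.put_object(Bucket=os.environ['GCS_BUCKET_NAME'], Key=key, Body=buf)
    return 'SUCCESS'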