# web2py model: table with a title, an uploaded image and free-form notes.
# `auth.signature` adds the standard created/modified audit fields;
# records are rendered by their title via `format`.
db.define_table('t_mitabla',
Field('f_titulo', type='string', label=T('Titulo')),
Field('f_imagen', type='upload',label=T('Imagen')),
Field('f_notas', type='text', label=T('Notas')),
auth.signature,
format='%(f_titulo)s',
migrate=settings.migrate)
import os
import urllib
import webapp2
from google.appengine.ext import blobstore
from google.appengine.ext.webapp import blobstore_handlers
class MainHandler(webapp2.RequestHandler):
    """Serve a minimal HTML form that uploads a file to the GAE Blobstore."""

    def get(self):
        # create_upload_url returns a one-shot URL; after the Blobstore
        # stores the file it forwards the request to /upload.
        upload_url = blobstore.create_upload_url('/upload')
        self.response.out.write('<html><body>')
        self.response.out.write('<form action="%s" method="POST" enctype="multipart/form-data">' % upload_url)
        # Triple-quoted literal kept byte-identical (its embedded newline and
        # leading spaces are part of the emitted HTML).
        self.response.out.write("""Upload File: <input type="file" name="file"><br> <input type="submit"
name="submit" value="Submit"> </form></body></html>""")
class UploadHandler(blobstore_handlers.BlobstoreUploadHandler):
    """Receive the Blobstore upload callback and redirect to the serve URL."""

    def post(self):
        upload_files = self.get_uploads('file')  # 'file' is file upload field in the form
        # NOTE(review): an empty upload would raise IndexError here — the
        # sample assumes the form always posts exactly one file; confirm.
        blob_info = upload_files[0]
        self.redirect('/serve/%s' % blob_info.key())
class ServeHandler(blobstore_handlers.BlobstoreDownloadHandler):
    """Stream a stored blob back to the client, addressed by its key."""

    def get(self, resource):
        # The blob key arrives URL-encoded in the path; decode it before
        # looking the blob up.
        resource = str(urllib.unquote(resource))
        blob_info = blobstore.BlobInfo.get(resource)
        self.send_blob(blob_info)
# webapp2 WSGI application: routes the upload form, the Blobstore upload
# callback and the serve-by-key endpoint to the three handlers above.
app = webapp2.WSGIApplication([('/', MainHandler),
('/upload', UploadHandler),
('/serve/([^/]+)?', ServeHandler)],
debug=True)
import urllib2
def index():
    """Load the web page at `site` and expose the response to the view."""
    # Original note (Spanish): "Cargamos la página web".
    # NOTE(review): `site` is not defined in this fragment — presumably a
    # global set in a model file; confirm before relying on this action.
    response = urllib2.urlopen(site)
    return locals()
from gcloud import storage
def index():
    """Fetch a Cloud Storage bucket via gcloud and expose it to the view."""
    # NOTE(review): `myproject` and `mybucket_name` are not defined in this
    # fragment — presumably globals from a model file; confirm.
    bucket = storage.get_bucket(myproject, mybucket_name)
    return locals()
def index():
    """Fetch bucket metadata (location, website mainPageSuffix) and expose it."""
    # NOTE(review): `client` is not defined in this fragment — presumably an
    # authorized Cloud Storage service object built elsewhere; confirm.
    req = client.buckets().get(
        bucket='mybucket',
        fields='location,website(mainPageSuffix)')  # optional
    resp = req.execute()
    return locals()
import argparse
import httplib2
import os
import sys
import json

from apiclient import discovery
from oauth2client import file
from oauth2client import client
from oauth2client import tools

# Define sample variables.
_BUCKET_NAME = 'mybucket'
_API_VERSION = 'v1'

# Parser for command-line arguments.
parser = argparse.ArgumentParser(
    description=__doc__,
    formatter_class=argparse.RawDescriptionHelpFormatter,
    parents=[tools.argparser])

# CLIENT_SECRETS is the name of a file containing the OAuth 2.0 information
# for this application, including client_id and client_secret. You can see
# the Client ID and Client secret on the APIs page in the Cloud Console.
CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json')

# Set up a Flow object to be used for authentication. Add only the scopes
# you need.
FLOW = client.flow_from_clientsecrets(
    CLIENT_SECRETS,
    scope=[],
    message=tools.message_if_missing(CLIENT_SECRETS))


def index():
    """Authorize against the Cloud Storage JSON API, dump the sample bucket's
    metadata and its object listing, and render an upload form.

    Returns a dict with the two JSON dumps (`print1`, `print2`) and the
    web2py `form` for db.gfile.
    """
    cliente = CLIENT_SECRETS
    flow = FLOW
    # va = main(sys.argv)
    # Parse the command-line flags.
    # flags = parser.parse_args(argv[1:])

    # If the credentials don't exist or are invalid run through the native
    # client flow. The Storage object ensures that, if successful, the good
    # credentials are written back to the file.
    storage = file.Storage('sample.dat')
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        # NOTE(review): `flags` is never assigned because the parse_args call
        # above is commented out — this branch would raise NameError; confirm
        # whether credentials are always pre-provisioned in 'sample.dat'.
        credentials = tools.run_flow(FLOW, storage, flags)

    # Create an httplib2.Http object to handle our HTTP requests and
    # authorize it with our good Credentials.
    http = httplib2.Http()
    http = credentials.authorize(http)

    # Construct the service object for interacting with the Cloud Storage API.
    service = discovery.build('storage', _API_VERSION, http=http)

    try:
        req = service.buckets().get(bucket=_BUCKET_NAME)
        resp = req.execute()
        print1 = json.dumps(resp, indent=2)

        fields_to_return = 'nextPageToken,items(name,size,contentType,metadata(my-key))'
        req = service.objects().list(bucket=_BUCKET_NAME,
                                     fields=fields_to_return)
        # If there are too many items to list in one request, list_next()
        # automatically handles paging with the pageToken.
        while req is not None:
            resp = req.execute()
            print2 = json.dumps(resp, indent=2)
            req = service.objects().list_next(req, resp)
    except client.AccessTokenRefreshError:
        aviso = "The credentials have been revoked or expired, please re-run the application to re-authorize"

    form = SQLFORM(db.gfile)
    return dict(print1=print1, print2=print2, form=form)
# GAE log excerpt kept for reference (commented out so the file parses):
# the import of httplib2 failed under web2py's custom importer because the
# library was not bundled with the deployed application.
#
# 14:49:26.005 Unable to store in FILE: /base/data/home/apps/s~merebafs/2.381697639759293929/applications/MRBFILE/controllers/default.py
# Traceback (most recent call last):
#   File "/base/data/home/apps/s~merebafs/2.381697639759293929/gluon/restricted.py", line 224, in restricted
#     exec ccode in environment
#   File "/base/data/home/apps/s~merebafs/2.381697639759293929/applications/MRBFILE/controllers/default.py", line 12, in <module>
#     import httplib2
#   File "/base/data/home/apps/s~merebafs/2.381697639759293929/gluon/custom_import.py", line 86, in custom_importer
#     raise ImportError, 'Cannot import module %s' % str(e)
# ImportError: Cannot import module 'httplib2'
# NOTE(review): orphaned fragment — the enclosing `def`/`try` this `except`
# belongs to is not visible here, and the lines below are whitespace-mangled
# pasted code (statements absorbed into comments) that do not parse on their
# own. Kept byte-identical pending reconstruction against the original source.
respuesta= json.dumps(resp, indent=2)
fields_to_return = 'nextPageToken,items(name,size,contentType,metadata(my-key))' req = service.objects().list(bucket=_BUCKET_NAME, fields=fields_to_return)
# This case is only needed when there are many buckets; not required in this example.
# If you have too many items to list in one request, list_next() will # automatically handle paging with the pageToken. while req is not None: resp = req.execute()
respuesta= json.dumps(resp, indent=2)
req = service.objects().list_next(req, resp)
except client.AccessTokenRefreshError:
respuesta= "The credentials have been revoked or expired, please re-run the application to re-authorize" return dict(respuesta=respuesta)
# Cloud Storage JSON API: delete one object from a bucket.
# NOTE(review): `client`, `bucket_name` and `object_name` are not defined in
# this fragment — presumably supplied by surrounding code; confirm before use.
client.objects().delete(
bucket=bucket_name,
object=object_name).execute()