vertigo_upload3r.cs
using System;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Extensions.Http;
using Microsoft.Azure.WebJobs.Host;
using Microsoft.WindowsAzure.Storage.Blob;
using Microsoft.WindowsAzure.Storage.Table;
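
// Example client call (illustrative only): a minimal sketch of how a client might
// build a multipart/form-data request that this function can parse. The function URL,
// field values, part names, and file name below are placeholder assumptions, not part
// of this project; the snippet would have to run inside an async method.
//
//     using (var client = new HttpClient())
//     using (var form = new MultipartFormDataContent())
//     {
//         // Form fields use the names handled in ParseFiles.
//         form.Add(new StringContent("Jane Doe"), "fields[name]");
//         form.Add(new StringContent("jane@example.com"), "fields[email]");
//         form.Add(new StringContent("My talk"), "fields[title]");
//         // A part that carries a file name is routed through fileProcessor and uploaded to the blob.
//         form.Add(new ByteArrayContent(System.IO.File.ReadAllBytes("talk.mp4")), "file", "talk.mp4");
//         var response = await client.PostAsync(
//             "https://<function-app>.azurewebsites.net/api/vertigo_upload3r", form);
//     }
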
namespace uploader
{
    public static class vertigo_upload3r
    {
        [FunctionName("vertigo_upload3r")]
        public static async Task<HttpResponseMessage> Run(
            [HttpTrigger(AuthorizationLevel.Anonymous, "get", "post", Route = null)] HttpRequestMessage req,
            [Blob("vertigostorage/uploaded/{rand-guid}")] CloudBlockBlob blob,
            [Table("Files", Connection = "AzureWebJobsStorage")] CloudTable tableBinding,
            TraceWriter log)
        {
            try
            {
                log.Info("C# HTTP trigger function processed a request.");

                // Reject oversized uploads before buffering the body.
                if (req.Content.Headers.ContentLength > 100_000_000)
                {
                    return req.CreateErrorResponse(HttpStatusCode.RequestEntityTooLarge, "Files should be below 100MB.");
                }

                // Buffer the request body so it can be read more than once.
                MemoryStream ms = new MemoryStream();
                await req.Content.CopyToAsync(ms);

                // CopyToAsync leaves the stream positioned at its end, so rewind before each read.
                ms.Position = 0;
                await blob.UploadFromStreamAsync(ms);
                ms.Position = 0;

                // Parse the multipart body: file parts overwrite the blob,
                // form fields are copied onto the table entity.
                File file = new File();
                await ParseFiles(ms, req.Content.Headers.ContentType, (string s, Stream st) => blob.UploadFromStream(st), file);
                file.File_Url = blob.Uri.ToString();
                file.RowKey = blob.Name.ToAzureKeyString();

                // Record the upload in the "Files" table.
                TableOperation insertOperation = TableOperation.Insert(file);
                TableResult result = tableBinding.Execute(insertOperation);

                return req.CreateResponse(HttpStatusCode.OK, blob.Uri);
            }
            catch (Exception e)
            {
                return req.CreateResponse(HttpStatusCode.InternalServerError, e.Message);
            }
        }
        // Reads a multipart/form-data body: parts that carry a file name are handed to
        // fileProcessor, known form fields are copied onto the File entity.
        public static async Task ParseFiles(Stream data, MediaTypeHeaderValue contentType, Action<string, Stream> fileProcessor, File file)
        {
            var streamContent = new StreamContent(data);
            streamContent.Headers.ContentType = contentType;
            var provider = await streamContent.ReadAsMultipartAsync();

            foreach (var httpContent in provider.Contents)
            {
                var fileName = httpContent.Headers.ContentDisposition.FileName;
                if (!string.IsNullOrWhiteSpace(fileName))
                {
                    // File part: stream its contents to the supplied processor.
                    using (Stream fileContents = await httpContent.ReadAsStreamAsync())
                    {
                        fileProcessor(fileName, fileContents);
                    }
                }
                else
                {
                    // Plain form field: map it onto the entity by its field name.
                    string content = await httpContent.ReadAsStringAsync();
                    switch (httpContent.Headers.ContentDisposition.Name.Trim('\"'))
                    {
                        case "fields[name]":
                            file.Name = content;
                            break;
                        case "fields[email]":
                            file.Email = content;
                            break;
                        case "fields[title]":
                            file.Title = content;
                            break;
                        case "fields[description]":
                            file.Description = content;
                            break;
                        case "fields[video_url]":
                            file.Video_Url = content;
                            break;
                        default:
                            break;
                    }
                }
            }
        }
        // Strips characters that are not allowed in Azure Table partition/row keys.
        public static string ToAzureKeyString(this string str)
        {
            var sb = new StringBuilder();
            foreach (var c in str
                .Where(c => c != '/'
                    && c != '\\'
                    && c != '#'
                    && c != '?'
                    && !char.IsControl(c)))
            {
                sb.Append(c);
            }
            return sb.ToString();
        }
        // Table entity describing one uploaded file; all rows share the "Data" partition.
        public class File : TableEntity
        {
            public File()
            {
                this.PartitionKey = "Data";
                this.Timestamp = DateTime.Now;
            }

            public string Name { get; set; }
            public string Email { get; set; }
            public string Title { get; set; }
            public string Description { get; set; }
            public string Video_Url { get; set; }
            public string File_Url { get; set; }
        }
    }
}