+using System.IO;
+
namespace Pithos.Client.WPF
{
    /// <summary>
    /// An entry in the "recent files" list: the file's full path plus a
    /// display name derived from that path.
    /// </summary>
    public class FileEntry
    {
        private string _path;

        /// <summary>
        /// The file's name without its directory. Refreshed automatically
        /// whenever <see cref="FullPath"/> is assigned.
        /// </summary>
        public string FileName { get; set; }

        /// <summary>
        /// The absolute path of the file. Setting this also updates
        /// <see cref="FileName"/> from the final path component.
        /// </summary>
        public string FullPath
        {
            get
            {
                return _path;
            }
            set
            {
                _path = value;
                FileName = Path.GetFileName(value);
            }
        }
    }
}
\ No newline at end of file
Taskbar.UsageMessage = "Using 15% of 50 GB";
- Taskbar.RecentFiles.AddRange(new[]
- {
- new FileEntry{FileName="Moo",FullPath=@"e:\Pithos\moo"} ,
- new FileEntry{FileName="Mee",FullPath=@"e:\Pithos\mee"}
- });
Taskbar.StatusMessage = "In Synch";
}
// </copyright>
// -----------------------------------------------------------------------
+using System.Collections.Concurrent;
using System.ComponentModel.Composition;
using System.Diagnostics;
using System.IO;
}
}
- private readonly IObservableCollection<FileEntry> _recentFiles = new BindableCollection<FileEntry>();
- public IObservableCollection<FileEntry> RecentFiles
        //Recently changed files, exposed through a thread-safe observable collection
        //so background agents can append entries while the UI binds to the list.
        private readonly ObservableConcurrentCollection<FileEntry> _recentFiles = new ObservableConcurrentCollection<FileEntry>();
        public ObservableConcurrentCollection<FileEntry> RecentFiles
        {
            get { return _recentFiles; }
        }
_events.Publish(new Notification { Title = "Pithos", Message = status, Level = level });
}
+
+ public void NotifyChangedFile(string filePath)
+ {
+ var entry = new FileEntry {FullPath=filePath};
+ IProducerConsumerCollection<FileEntry> files=this.RecentFiles;
+ FileEntry popped;
+ while (files.Count > 5)
+ files.TryTake(out popped);
+ files.TryAdd(entry);
+ }
}
}
        //The kind of cloud operation to perform (upload, download, synch, delete, rename)
        public CloudActionType Action { get; set; }
        //Local side of the action, if any
        public FileInfo LocalFile { get; set; }
        //Remote side of the action, if any
        public ObjectInfo CloudFile { get; set; }
        //Persisted state record for the local file
        public FileState FileState { get; set; }
        //Lazily calculated MD5 hash of the local file
        public Lazy<string> LocalHash { get; private set; }
        //Lazily calculated tree top-hash of the local file.
        //NOTE(review): left null by some constructors; callers that use
        //TopHash.Value should confirm it was initialized for their action type.
        public Lazy<string> TopHash { get; set; }
        //Old/new names and paths used by rename actions
        public string OldFileName { get; set; }
        public string OldPath { get; set; }
        public string NewFileName { get; set; }
        public string NewPath { get; set; }
+
        /// <summary>
        /// Creates a rename action.
        /// NOTE(review): only Action and LocalHash are assigned in the visible
        /// body — confirm the old/new path parameters are stored in the elided
        /// portion of this constructor.
        /// </summary>
        public CloudAction(CloudActionType action, string oldPath, string oldFileName, string newFileName, string newPath)
        {
            Action = action;
            //MD5 of the renamed target; calculated once, on first use, thread-safely
            LocalHash = new Lazy<string>(() => Signature.CalculateMD5(NewFileName), LazyThreadSafetyMode.ExecutionAndPublication);
        }
- public CloudAction(CloudActionType action, FileInfo localFile, ObjectInfo cloudFile)
        /// <summary>
        /// Creates an action relating a local file to a cloud object.
        /// </summary>
        /// <param name="action">The operation to perform.</param>
        /// <param name="localFile">Local file; may be null for cloud-only actions.</param>
        /// <param name="cloudFile">The matching cloud object.</param>
        /// <param name="state">Persisted state of the local file.</param>
        public CloudAction(CloudActionType action, FileInfo localFile, ObjectInfo cloudFile,FileState state)
        {
            Action = action;
            LocalFile = localFile;
            CloudFile = cloudFile;
            FileState = state;
            //Hashes are only prepared when a local file exists; both are lazy so the
            //expensive calculation runs at most once, on first access, thread-safely.
            //NOTE(review): when localFile is null, LocalHash and TopHash remain null —
            //confirm callers never dereference them for download-only actions.
            if (LocalFile != null)
            {
                LocalHash = new Lazy<string>(() => Signature.CalculateMD5(LocalFile.FullName),
                                             LazyThreadSafetyMode.ExecutionAndPublication);
                //Tree hash over 4MB blocks using SHA-256; only the top hash is kept
                TopHash = new Lazy<string>(() => Signature.CalculateTreeHash(LocalFile.FullName, 4 * 1024 * 1024, "sha256")
                                                    .TopHash.ToHashString());
            }
        }
+
        //Constructor for downloading files — no local file exists yet,
        //so no local hashes are initialized
        public CloudAction(CloudActionType action, ObjectInfo cloudFile)
        {
            Action = action;
            CloudFile = cloudFile;
        }
}
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Diagnostics;
+using System.Diagnostics.Contracts;
using System.IO;
using System.Linq;
using System.Text;
state.TriggeringChange == WatcherChangeTypes.Changed
))
return;
- UpdateFileStatus(state);
- UpdateOverlayStatus(state);
- UpdateFileChecksum(state);
- WorkflowAgent.Post(state);
+
+ try
+ {
+ UpdateFileStatus(state);
+ UpdateOverlayStatus(state);
+ UpdateFileChecksum(state);
+ WorkflowAgent.Post(state);
+ }
+ catch (IOException exc)
+ {
+ Trace.TraceWarning("File access error occured, retrying {0}\n{1}", state.Path, exc);
+ _agent.Post(state);
+ }
+ catch (Exception exc)
+ {
+ Trace.TraceWarning("Error occured while indexing{0. The file will be skipped}\n{1}", state.Path, exc);
+ }
}
private Dictionary<WatcherChangeTypes, FileStatus> _statusDict = new Dictionary<WatcherChangeTypes, FileStatus>
return state;
}
-
-
+ //Does the file exist in the container's local folder?
+ public bool Exists(string relativePath)
+ {
+ if (String.IsNullOrWhiteSpace(relativePath))
+ throw new ArgumentNullException("relativePath");
+ //A RootPath must be set before calling this method
+ if (String.IsNullOrWhiteSpace(RootPath))
+ throw new InvalidOperationException("RootPath was not set");
+ Contract.EndContractBlock();
+ //Create the absolute path by combining the RootPath with the relativePath
+ var absolutePath=Path.Combine(RootPath, relativePath);
+ //Is this a valid file?
+ if (File.Exists(absolutePath))
+ return true;
+ //Or a directory?
+ if (Directory.Exists(RootPath))
+ return true;
+ //Fail if it is neither
+ return false;
+ }
+ public FileInfo GetFileInfo(string relativePath)
+ {
+ if (String.IsNullOrWhiteSpace(relativePath))
+ throw new ArgumentNullException("relativePath");
+ //A RootPath must be set before calling this method
+ if (String.IsNullOrWhiteSpace(RootPath))
+ throw new InvalidOperationException("RootPath was not set");
+ Contract.EndContractBlock();
+
+ var absolutePath = Path.Combine(RootPath, relativePath);
+ Debug.Assert(File.Exists(absolutePath));
+
+ return new FileInfo(absolutePath);
+
+ }
}
}
var enqueueFiles = listObjects.ContinueWith(task =>
{
- //ListObjects failed at this point, need to reschedule
if (task.IsFaulted)
{
- Trace.TraceError("[FAIL] ListObjects in ProcessRemoteFiles with {0}",task.Exception);
+ //ListObjects failed at this point, need to reschedule
+ Trace.TraceError("[FAIL] ListObjects in ProcessRemoteFiles with {0}", task.Exception);
ProcessRemoteFiles(accountPath, since);
return;
}
var remoteObjects = task.Result;
- var remote=(from info in remoteObjects
+ var remote=from info in remoteObjects
let name=info.Name
where !name.EndsWith(".ignore",StringComparison.InvariantCultureIgnoreCase) &&
!name.StartsWith("fragments/",StringComparison.InvariantCultureIgnoreCase)
- select info)
- .Distinct(new ObjectInfoByNameComparer()).ToDictionary(info=> info.Name.ToLower(), info=>info);
+ select info;
- var commonObjects = new List<Tuple<ObjectInfo, FileInfo>>();
- var localFiles = new List<FileInfo>();
+ var commonObjects = new List<Tuple<ObjectInfo, FileInfo,FileState>>();
+ var remoteOnly = new List<ObjectInfo>();
//In order to avoid multiple iterations over the files, we iterate only once
- foreach (var fileInfo in FileAgent.EnumerateFileInfos())
+ //over the remote files
+ foreach (var objectInfo in remote)
{
- var relativeUrl=fileInfo.AsRelativeUrlTo(FileAgent.RootPath);
+ var relativePath= objectInfo.Name.RelativeUrlToFilePath();// fileInfo.AsRelativeUrlTo(FileAgent.RootPath);
//and remove any matching objects from the list, adding them to the commonObjects list
- if (remote.ContainsKey(relativeUrl))
+ if (FileAgent.Exists(relativePath))
{
- commonObjects.Add(Tuple.Create(remote[relativeUrl], fileInfo));
- remote.Remove(relativeUrl);
+ var localFile = FileAgent.GetFileInfo(relativePath);
+ var state=FileState.FindByFilePath(localFile.FullName);
+ commonObjects.Add(Tuple.Create(objectInfo, localFile,state));
}
else
//If there is no match we add them to the localFiles list
- localFiles.Add(fileInfo);
+ remoteOnly.Add(objectInfo);
}
//At the end of the iteration, the *remote* list will contain the files that exist
//only on the server
- //Local files should be uploaded
- var actionsForLocal = from localFile in localFiles
- select new CloudAction(CloudActionType.UploadUnconditional,
- localFile,
- ObjectInfo.Empty);
-
//Remote files should be downloaded
- var actionsForRemote = from dictPair in remote
- let upFile = dictPair.Value
- select new CloudAction(CloudActionType.DownloadUnconditional, null, upFile);
+ var actionsForRemote = from upFile in remoteOnly
+ select new CloudAction(CloudActionType.DownloadUnconditional,upFile);
//Common files should be checked on a per-case basis to detect differences, which is newer
var actionsForCommon = from pair in commonObjects
let objectInfo = pair.Item1
let localFile = pair.Item2
- select new CloudAction(CloudActionType.MustSynch, localFile, objectInfo);
+ let state=pair.Item3
+ select new CloudAction(CloudActionType.MustSynch,
+ localFile, objectInfo,state);
+
//Collect all the actions
- var allActions = actionsForLocal.Union(actionsForRemote).Union(actionsForCommon);
+ var allActions = actionsForRemote.Union(actionsForCommon);
//And remove those that are already being processed by the agent
var distinctActions =allActions
//Queue all the actions
_agent.AddFromEnumerable(distinctActions);
- StatusNotification.NotifyChange(String.Format("Processing {0} files", distinctActions.Count));
+ if(remoteOnly.Count>0)
+ StatusNotification.NotifyChange(String.Format("Processing {0} new files", remoteOnly.Count));
Trace.TraceInformation("[LISTENER] End Processing");
Trace.CorrelationManager.StopLogicalOperation();
switch (action.Action)
{
case CloudActionType.UploadUnconditional:
- UploadCloudFile(localFile, action.LocalHash.Value);
+ UploadCloudFile(localFile, action.LocalHash.Value,action.TopHash.Value);
break;
case CloudActionType.DownloadUnconditional:
DownloadCloudFile(_pithosContainer, new Uri(cloudFile.Name,UriKind.Relative), downloadPath);
break;
case CloudActionType.MustSynch:
if (File.Exists(downloadPath))
- {
+ {
var cloudHash = cloudFile.Hash;
var localHash = action.LocalHash.Value;
- if (!cloudHash.Equals(localHash, StringComparison.InvariantCultureIgnoreCase))
+ var topHash = action.TopHash.Value;
+ //Not enough to compare only the local hashes, also have to compare the tophashes
+ if (!cloudHash.Equals(localHash, StringComparison.InvariantCultureIgnoreCase) &&
+ !cloudHash.Equals(topHash, StringComparison.InvariantCultureIgnoreCase))
{
var lastLocalTime = localFile.LastWriteTime;
var lastUpTime = cloudFile.Last_Modified;
//Maybe need to store version as well, to check who has the latest version
//StatusKeeper.SetFileOverlayStatus(downloadPath, FileOverlayStatus.Conflict);
- UploadCloudFile(localFile, action.LocalHash.Value);
+ UploadCloudFile(localFile, action.LocalHash.Value,action.TopHash.Value);
}
else
{
StatusKeeper.StoreInfo(localPath, t.Result));
storeInfo.Wait();
+ StatusNotification.NotifyChangedFile(localPath);
}
}
- private void UploadCloudFile(FileInfo fileInfo, string hash)
+ private void UploadCloudFile(FileInfo fileInfo, string hash,string topHash)
{
if (fileInfo==null)
throw new ArgumentNullException("fileInfo");
var url = fileInfo.AsRelativeUrlTo(FileAgent.RootPath);
- using(var gate=NetworkGate.Acquire(fileInfo.FullName,NetworkOperation.Uploading))
+ var fullFileName = fileInfo.FullName;
+ using(var gate=NetworkGate.Acquire(fullFileName,NetworkOperation.Uploading))
{
//Abort if the file is already being uploaded or downloaded
if (gate.Failed)
var info = CloudClient.GetObjectInfo(_pithosContainer, url);
//If the file hashes match, abort the upload
- if (hash.Equals(info.Hash, StringComparison.InvariantCultureIgnoreCase))
+ if (hash.Equals(info.Hash, StringComparison.InvariantCultureIgnoreCase) ||
+ topHash.Equals(info.Hash, StringComparison.InvariantCultureIgnoreCase))
{
//but store any metadata changes
- this.StatusKeeper.StoreInfo(fileInfo.FullName, info);
- Trace.TraceInformation("Skip upload of {0}, hashes match", fileInfo.FullName);
+ this.StatusKeeper.StoreInfo(fullFileName, info);
+ Trace.TraceInformation("Skip upload of {0}, hashes match", fullFileName);
return;
}
//The hash will be stored under the fragments path, in the same relative path as to
//the pithos root path
var relativePath = fileInfo.AsRelativeTo(FileAgent.RootPath);
+/*
var hashPath = Path.Combine(FileAgent.FragmentsPath, relativePath) + ".hashmap";
//Load the hash or calculate a new one
var hashFileExists = File.Exists(hashPath);
Trace.TraceInformation("Skip upload of {0}, treehashes match", fileInfo.FullName);
return;
}
+*/
{
//Mark the file as modified while we upload it
var setStatus = Task.Factory.StartNew(() =>
- StatusKeeper.SetFileOverlayStatus(fileInfo.FullName,FileOverlayStatus.Modified));
+ StatusKeeper.SetFileOverlayStatus(fullFileName,FileOverlayStatus.Modified));
//And then upload it
var put = setStatus.ContinueWith(t =>
- CloudClient.PutObject(_pithosContainer,url,fileInfo.FullName, hash));
+ CloudClient.PutObject(_pithosContainer,url,fullFileName, hash));
});
putOrUpdate.Wait();
//If everything succeeds, change the file and overlay status to normal
- this.StatusKeeper.SetFileState(fileInfo.FullName, FileStatus.Unchanged, FileOverlayStatus.Normal);
+ this.StatusKeeper.SetFileState(fullFileName, FileStatus.Unchanged, FileOverlayStatus.Normal);
}
//Notify the Shell to update the overlays
- NativeMethods.RaiseChangeNotification(fileInfo.FullName);
+ NativeMethods.RaiseChangeNotification(fullFileName);
+ StatusNotification.NotifyChangedFile(fullFileName);
}
this.StatusKeeper.RemoveFileOverlayStatus(path);
return;
}
-
+ var fileState = FileState.FindByFilePath(path);
switch (state.Status)
{
case FileStatus.Created:
case FileStatus.Modified:
var info = new FileInfo(path);
- NetworkAgent.Post(new CloudAction(CloudActionType.UploadUnconditional, info, ObjectInfo.Empty));
+ NetworkAgent.Post(new CloudAction(CloudActionType.UploadUnconditional, info, ObjectInfo.Empty,fileState));
break;
case FileStatus.Deleted:
- NetworkAgent.Post(new CloudAction(CloudActionType.DeleteCloud, null, new ObjectInfo {Name=fileName}));
+ NetworkAgent.Post(new CloudAction(CloudActionType.DeleteCloud, null, new ObjectInfo {Name=fileName},fileState));
break;
case FileStatus.Renamed:
NetworkAgent.Post(new CloudAction(CloudActionType.RenameCloud, state.OldFileName,state.OldPath,state.FileName,state.Path));
// </copyright>
// -----------------------------------------------------------------------
+using System.Diagnostics.Contracts;
using System.IO;
using System.Threading.Tasks;
using Castle.ActiveRecord;
        //Persisted top (tree) hash of the file's contents.
        //NOTE(review): distinct from the MD5 Checksum column — confirm against
        //the hashing code that populates it.
        [Property]
        public string TopHash { get; set; }

        //Server-side version number of the object, when known
        [Property]
        public long? Version { get; set; }

        //Timestamp of the server-side version, when known
        [Property]
        public DateTime? VersionTimeStamp { get; set; }

[HasMany(Cascade = ManyRelationCascadeEnum.AllDeleteOrphan, Lazy = true,Inverse=true)]
public IList<FileTag> Tags
{
// [Property]
// public byte[] HashmapHash { get; set; }
- public static Task<FileState> CreateForAsync(string filePath)
+ public static FileState FindByFilePath(string absolutePath)
+ {
+ return Queryable.FirstOrDefault(s => s.FilePath == absolutePath.ToLower());
+ }
+
+ public static Task<FileState> CreateForAsync(string filePath,int blockSize,string algorithm)
{
+ if (blockSize <= 0)
+ throw new ArgumentOutOfRangeException("blockSize");
+ if (String.IsNullOrWhiteSpace(algorithm))
+ throw new ArgumentNullException("algorithm");
+ Contract.EndContractBlock();
+
+
var fileState = new FileState
{
FilePath = filePath,
};
- return fileState.UpdateHashesAsync();
+ return fileState.UpdateHashesAsync(blockSize,algorithm);
}
- public Task<FileState> UpdateHashesAsync()
+ public Task<FileState> UpdateHashesAsync(int blockSize,string algorithm)
{
+ if (blockSize<=0)
+ throw new ArgumentOutOfRangeException("blockSize");
+ if (String.IsNullOrWhiteSpace(algorithm))
+ throw new ArgumentNullException("algorithm");
+ Contract.EndContractBlock();
+
//Skip updating the hash for folders
if (Directory.Exists(FilePath))
return Task.Factory.StartNew(() => this);
- return Task.Factory.StartNew(() => { Checksum = Signature.CalculateMD5(FilePath); })
+ return Task.Factory.StartNew(() =>
+ {
+ Checksum = Signature.CalculateMD5(FilePath);
+ TopHash =
+ Signature.CalculateTreeHash(FilePath, blockSize, algorithm)
+ .TopHash.ToHashString();
+ })
.ContinueWith(
t => this);
}
    /// <summary>
    /// Receives user-visible status notifications from the sync agents.
    /// </summary>
    public interface IStatusNotification
    {
        /// <summary>Shows a general status message at the given trace level.</summary>
        void NotifyChange(string status,TraceLevel level=TraceLevel.Info);
        /// <summary>Reports that a file changed, e.g. to update a recent-files list.</summary>
        void NotifyChangedFile(string filePath);
    }
}
if (!Directory.Exists(_pithosDataPath))
Directory.CreateDirectory(_pithosDataPath);
- //File.Delete(Path.Combine(_pithosDataPath, "pithos.db"));
+ File.Delete(Path.Combine(_pithosDataPath, "pithos.db"));
var source = GetConfiguration(_pithosDataPath);
ActiveRecordStarter.Initialize(source,typeof(FileState),typeof(FileTag));
newFiles.ForAll(file =>
{
- var createState = FileState.CreateForAsync(file)
+ var createState = FileState.CreateForAsync(file,this.BlockSize,this.BlockHash)
.ContinueWith(state =>{
_persistenceAgent.Post(state.Result.Create);
return state.Result;
});
- Func<Guid, Task<TreeHash>> treeBuilder = (stateId) =>
+ /*Func<Guid, Task<TreeHash>> treeBuilder = (stateId) =>
Signature.CalculateTreeHashAsync(file, BlockSize, BlockHash)
.ContinueWith(treeTask =>
{
var treeHash = treeTask.Result;
treeHash.FileId = stateId;
return treeHash;
- });
+ });*/
- var createTree=createState.ContinueWith(stateTask =>
+ /* var createTree=createState.ContinueWith(stateTask =>
treeBuilder(stateTask.Result.Id))
.Unwrap();
- createTree.ContinueWith(treeTask =>
- treeTask.Result.Save(_pithosDataPath));
+ var saveTree=createTree.ContinueWith(treeTask =>
+ treeTask.Result.Save(_pithosDataPath));*/
});
return newFiles;
try
{
- var state = FileState.Queryable.FirstOrDefault(s => s.FilePath == path.ToLower());
+ var state = FileState.FindByFilePath(path);
return state == null ? defaultValue : getter(state);
}
catch (Exception exc)
using (new SessionScope())
{
var filePath = path.ToLower();
- var state = FileState.Queryable.FirstOrDefault(s => s.FilePath == filePath);
+ var state = FileState.FindByFilePath(filePath);
if (state != null)
{
setter(state);
{
var filePath = path.ToLower();
- var state = FileState.Queryable.FirstOrDefault(s => s.FilePath == filePath);
+ var state = FileState.FindByFilePath(filePath);
if (state == null)
{
Trace.TraceWarning("[NOFILE] Unable to set status for {0}.", filePath);
{
try
{
- var state = FileState.Queryable.FirstOrDefault(s => s.FilePath == path.ToLower());
+ var state = FileState.FindByFilePath(path);
return state == null ? FileOverlayStatus.Unversioned : state.OverlayStatus;
}
catch (Exception exc)
}
private static void InnerRenameFileOverlayStatus(string oldPath, string newPath)
- {
- var state = FileState.Queryable.FirstOrDefault(s => s.FilePath == oldPath);
+ {
+ var state = FileState.FindByFilePath(oldPath);
if (state == null)
{
{
if (String.IsNullOrWhiteSpace(path))
throw new ArgumentNullException("path", "path can't be empty");
-
+ Contract.EndContractBlock();
+
UpdateStatus(path,state=>
{
state.FileStatus = fileStatus;
throw new ArgumentNullException("path", "path can't be empty");
if (objectInfo==null)
throw new ArgumentNullException("objectInfo", "objectInfo can't be empty");
-
+ Contract.EndContractBlock();
_persistenceAgent.Post(() =>
{
{
//Forgetting to use a sessionscope results in two sessions being created, one by
//FirstOrDefault and one by Save()
- var state =
- FileState.Queryable.FirstOrDefault(s => s.FilePath == filePath);
+ var state =FileState.FindByFilePath(filePath);
//Create a new empty state object if this is a new file
state = state ?? new FileState();
state.FilePath = filePath;
state.Checksum = objectInfo.Hash;
+ state.Version = objectInfo.Version;
+ state.VersionTimeStamp = objectInfo.VersionTimestamp;
+
+ if(objectInfo.Bytes>BlockSize)
+ state.TopHash = objectInfo.Hash;
state.FileStatus = FileStatus.Unchanged;
state.OverlayStatus = FileOverlayStatus.Normal;
+
//Create a list of tags from the ObjectInfo's tag dictionary
//Make sure to bind each tag to its parent state so we don't have to save each tag separately
//state.Tags = (from pair in objectInfo.Tags
}
public FileStatus GetFileStatus(string path)
- {
- var state = FileState.Queryable.FirstOrDefault(s => s.FilePath == path.ToLower());
+ {
+ var state = FileState.FindByFilePath(path);
return (state==null)?FileStatus.Missing:state.FileStatus ;
}
{
using (new SessionScope())
{
- var state = FileState.Queryable.FirstOrDefault(s => s.FilePath == path);
+ var state = FileState.FindByFilePath(path);
if (state == null)
{
Trace.TraceWarning("[NOFILE] Unable to set checkesum for {0}.", path);
--- /dev/null
/// <summary>
/// Well-known Pithos "X-Object-*" response header names, shared between the
/// header-extraction code and the JSON alias properties.
/// </summary>
//Idiom fix: access modifier precedes "static" per C# convention.
internal static class KnownExtensions
{
    public const string X_Object_Version = "X-Object-Version";
    public const string X_Object_Version_Timestamp = "X-Object-Version-Timestamp";
    public const string X_Object_Modified_By = "X-Object-Modified-By";
}
\ No newline at end of file
}
}
- public long? Version { get; set; }
- public DateTime? VersionTimeStamp { get; set; }
        private long? _version;
        //Server-side version number of the object
        public long? Version
        {
            get { return _version; }
            set { _version = value; }
        }

        //Alias for version, for Json deserialization purposes
        public long? X_Object_Version
        {
            get { return _version; }
            set { _version = value; }
        }

        /* //Alias for VersionTimestamp, for Json deserialization purposes
        public DateTime? X_Object_Version_Timestamp
        {
            get { return _versionTimestamp; }
            set { _versionTimestamp = value; }
        }*/
        //NOTE(review): with the JSON alias above commented out, VersionTimestamp
        //is only populated from response headers (ExtractKnownExtensions), not
        //from JSON listings — confirm this is intended.

        private DateTime? _versionTimestamp;
        //Timestamp of the server-side version
        public DateTime? VersionTimestamp
        {
            get { return _versionTimestamp; }
            set { _versionTimestamp = value; }
        }

        //Account that last modified the object (backing field declared further down)
        public string ModifiedBy
        {
            get{ return _modifiedBy; }
            set{ _modifiedBy = value; }
        }

        //Alias for ModifiedBy, for Json deserialization purposes
        public string X_Object_Modified_By
        {
            get{ return _modifiedBy; }
            set{ _modifiedBy = value; }
        }
        //Content stream of the object, when one was requested
        public Stream Stream { get; set; }

        //Pulls the well-known X-Object-* headers out of the extensions
        //dictionary into strongly-typed properties
        private void ExtractKnownExtensions()
        {
            Version=GetLong(KnownExtensions.X_Object_Version);
            VersionTimestamp = GetTimestamp(KnownExtensions.X_Object_Version_Timestamp);
            ModifiedBy = GetString(KnownExtensions.X_Object_Modified_By);
        }
+
+ private string GetString(string name)
+ {
+ var value=String.Empty;
+ _extensions.TryGetValue(name, out value);
+ return value ;
}
        //Parses the named extension header as a long, returning null when the
        //header is missing or is not a valid integer
        private long? GetLong(string name)
        {
            string version;
            long value;
            return _extensions.TryGetValue(name, out version) && long.TryParse(version, out value)
                       ? (long?) value
                       : null;
        }
private DateTime? GetTimestamp(string name)
Content_Type = String.Empty,
Last_Modified = DateTime.MinValue
};
+
+ private string _modifiedBy;
}
}
\ No newline at end of file
<Compile Include="AccountSettings.cs" />
<Compile Include="IPithosSettings.cs" />
<Compile Include="IStatusChecker.cs" />
+ <Compile Include="KnownExtensions.cs" />
<Compile Include="ObjectInfo.cs" />
<Compile Include="PithosSettingsData.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
select new {Name = name, Value = client.ResponseHeaders[name]})
.ToDictionary(t => t.Name, t => t.Value);
var extensions = (from key in keys
- where key.StartsWith("X-Object-") && !key.StartsWith("X-Object-Meta-")
- let name = key.Substring(9)
- select new {Name = name, Value = client.ResponseHeaders[name]})
+ where key.StartsWith("X-Object-") && !key.StartsWith("X-Object-Meta-")
+ select new {Name = key, Value = client.ResponseHeaders[key]})
.ToDictionary(t => t.Name, t => t.Value);
- return new ObjectInfo
- {
- Name = objectName,
- /*Bytes =
- long.Parse(client.GetHeaderValue("Content-Length")),*/
- Hash = client.GetHeaderValue("ETag"),
- Content_Type = client.GetHeaderValue("Content-Type"),
- Tags = tags,
- Extensions = extensions
- };
+ var info = new ObjectInfo
+ {
+ Name = objectName,
+ Hash = client.GetHeaderValue("ETag"),
+ Content_Type = client.GetHeaderValue("Content-Type"),
+ Tags = tags,
+ Last_Modified = client.LastModified,
+ Extensions = extensions
+ };
+ return info;
case HttpStatusCode.NotFound:
return ObjectInfo.Empty;
default:
{
Trace.TraceInformation("[PUT PROGRESS] {0} {1}% {2} of {3}", fileName, args.ProgressPercentage, args.BytesSent, args.TotalBytesToSend);
};
-
+
+ client.UploadFileCompleted += (sender, args) =>
+ {
+ Trace.TraceInformation("[PUT PROGRESS] Completed {0}", fileName);
+ };
return client.UploadFileTask(uri, "PUT", fileName)
.ContinueWith(upload=>
{
{
var response = (HttpWebResponse)base.GetWebResponse(request);
StatusCode = response.StatusCode;
- StatusDescription = response.StatusDescription;
+ LastModified=response.LastModified;
+ StatusDescription = response.StatusDescription;
return response;
}
catch (WebException exc)
}
}
        //Last-Modified header of the most recent response, captured in GetWebResponse
        public DateTime LastModified { get; private set; }

private static string GetContent(WebResponse webResponse)
{
string content;
string hash;
using (var hasher = MD5.Create())
- using (var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite, 4096, true))
+ using (var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var hashBytes = hasher.ComputeHash(stream);
hash = hashBytes.ToHashString();
public static string ToHashString(this byte[] hashBytes)
{
var shb = new SoapHexBinary(hashBytes);
- return shb.ToString();
+ return shb.ToString().ToLower();
}