问下大家买房子国内家里帮忙出资的钱怎么转过来呢。。。# Living
D*0
1 楼
网上申请的,回复的挺快,安排了code challenge,一道题,不限时,半个小时写完了
,发过去,第二天收到了thank you but 88.不知道哪里的问题。
* Write a function that takes two parameters:
* (1) a String representing a text document and
* (2) an integer providing the number of items to return.
* Implement the function such that it returns a list of Strings ordered by
word frequency,
* the most frequently occurring word first.
* Use your best judgement to decide how words are separated.
* Your solution should run in O(n) time where n is the number of characters
in the document.
这是我的solution,给了两种:
/**
 * Interview exercise: return the top-k most frequent words of a text document,
 * most frequent first. A "word" is a maximal run of ASCII letters; counting is
 * case-insensitive (the document is lower-cased first).
 *
 * NOTE(review): the class name keeps the poster's original lowerCamelCase so
 * callers/compilation stay compatible; idiomatic Java would be TopKFrequentWords.
 */
public class getTopKFrequentWords {

    /**
     * Returns up to {@code k} words ordered by descending frequency, most
     * frequent first. Words with equal frequency are ordered arbitrarily
     * (HashMap iteration order feeds the heap).
     *
     * Complexity: O(n) to count words (n = characters in the document), plus
     * O(m log m) heap work (m = distinct words). Note: despite the original
     * comments, this is NOT a bounded size-k heap — every map entry is offered
     * to the queue, so the heap term is O(m log m), not O(m log k).
     *
     * @param doc the text document; {@code null} or empty yields an empty list
     * @param k   number of words to return; {@code k <= 0} yields an empty list
     * @return list of at most k words, most frequent first
     */
    public static LinkedList<String> getTopKFrequentWords(final String doc, final int k) {
        LinkedList<String> ret = new LinkedList<String>();
        if (k <= 0 || doc == null || doc.isEmpty()) {
            return ret;
        }
        HashMap<String, Integer> counts = countWords(doc);

        // Max-heap on frequency: the head is the most frequent entry.
        PriorityQueue<Map.Entry<String, Integer>> heap =
                new PriorityQueue<Map.Entry<String, Integer>>(
                        Math.max(1, counts.size()),
                        new Comparator<Map.Entry<String, Integer>>() {
                            @Override
                            public int compare(Map.Entry<String, Integer> e1,
                                               Map.Entry<String, Integer> e2) {
                                // Integer.compare avoids the overflow risk of
                                // subtracting the two counts directly.
                                return Integer.compare(e2.getValue(), e1.getValue());
                            }
                        });
        heap.addAll(counts.entrySet());

        int taken = 0;
        while (taken < k && !heap.isEmpty()) {
            ret.add(heap.poll().getKey());
            taken++;
        }
        return ret;
    }

    /**
     * Variant that keeps all words of the same frequency together: it returns
     * the words of the k highest distinct frequencies, so the result may hold
     * MORE than k words when several words share a frequency.
     *
     * Complexity: O(n) counting + O(m) grouping + O(f log f) heap work,
     * where f = number of distinct frequencies (f <= m <= n).
     *
     * @param doc the text document; {@code null} or empty yields an empty list
     * @param k   number of frequency groups to return; {@code k <= 0} yields an empty list
     * @return words of the k highest frequencies, highest-frequency group first
     */
    public static LinkedList<String> getTopKFrequentWords2(final String doc, final int k) {
        LinkedList<String> ret = new LinkedList<String>();
        if (k <= 0 || doc == null || doc.isEmpty()) {
            return ret;
        }
        HashMap<String, Integer> counts = countWords(doc);

        // Invert the count map: frequency -> all words having that frequency.
        HashMap<Integer, LinkedList<String>> byFreq = new HashMap<Integer, LinkedList<String>>();
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            LinkedList<String> bucket = byFreq.get(entry.getValue());
            if (bucket == null) {
                bucket = new LinkedList<String>();
                byFreq.put(entry.getValue(), bucket);
            }
            bucket.add(entry.getKey());
        }

        // Max-heap on the frequency key (highest frequency first).
        PriorityQueue<Map.Entry<Integer, LinkedList<String>>> heap =
                new PriorityQueue<Map.Entry<Integer, LinkedList<String>>>(
                        Math.max(1, byFreq.size()),
                        new Comparator<Map.Entry<Integer, LinkedList<String>>>() {
                            @Override
                            public int compare(Map.Entry<Integer, LinkedList<String>> e1,
                                               Map.Entry<Integer, LinkedList<String>> e2) {
                                return Integer.compare(e2.getKey(), e1.getKey());
                            }
                        });
        heap.addAll(byFreq.entrySet());

        int taken = 0;
        while (taken < k && !heap.isEmpty()) {
            ret.addAll(heap.poll().getValue());
            taken++;
        }
        return ret;
    }

    /**
     * Counts lower-cased letter runs in {@code doc}. O(n) in document length.
     * Shared by both public methods (the original duplicated this loop, and
     * its trailing-word branch probed the map with a StringBuilder key —
     * always a miss — so a repeated final word was reset to count 1).
     */
    private static HashMap<String, Integer> countWords(final String doc) {
        HashMap<String, Integer> counts = new HashMap<String, Integer>();
        // NOTE(review): default-locale toLowerCase kept from the original;
        // toLowerCase(Locale.ROOT) would be safer for e.g. Turkish locales.
        String lower = doc.toLowerCase();
        StringBuilder word = new StringBuilder();
        for (int i = 0; i < lower.length(); i++) {
            char ch = lower.charAt(i);
            if (isLetter(ch)) {
                word.append(ch);
            } else if (word.length() > 0) {
                // Non-letter ends the current word: record it and reset.
                bump(counts, word.toString());
                word.setLength(0); // reuse the builder instead of reallocating
            }
        }
        if (word.length() > 0) { // flush the final word of the document
            bump(counts, word.toString());
        }
        return counts;
    }

    /** Increments the count stored under {@code w} (1 when absent). */
    private static void bump(final HashMap<String, Integer> counts, final String w) {
        Integer c = counts.get(w);
        counts.put(w, c == null ? 1 : c + 1);
    }

    /** ASCII-only letter test; deliberately narrower than Character.isLetter. */
    public static boolean isLetter(final char ch) {
        return (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z');
    }
}
难道不是 O(n) 吗?
,发过去,第二天收到了thank you but 88.不知道哪里的问题。
* Write a function that takes two parameters:
* (1) a String representing a text document and
* (2) an integer providing the number of items to return.
* Implement the function such that it returns a list of Strings ordered by
word frequency,
* the most frequently occurring word first.
* Use your best judgement to decide how words are separated.
* Your solution should run in O(n) time where n is the number of characters
in the document.
这是我的solution,给了两种:
/**
 * Interview exercise (second paste of the same class — the forum's HTML
 * renderer stripped every generic type parameter, so this copy is
 * reconstructed from the first): return the top-k most frequent words of a
 * text document, most frequent first. A "word" is a maximal run of ASCII
 * letters; counting is case-insensitive (the document is lower-cased first).
 */
public class getTopKFrequentWords {

    /**
     * Returns up to {@code k} words ordered by descending frequency, most
     * frequent first. Words with equal frequency are ordered arbitrarily
     * (HashMap iteration order feeds the heap).
     *
     * Complexity: O(n) to count words (n = characters in the document), plus
     * O(m log m) heap work (m = distinct words) — every map entry is offered
     * to the queue, so this is not the advertised O(m log k).
     *
     * @param doc the text document; {@code null} or empty yields an empty list
     * @param k   number of words to return; {@code k <= 0} yields an empty list
     * @return list of at most k words, most frequent first
     */
    public static LinkedList<String> getTopKFrequentWords(final String doc, final int k) {
        LinkedList<String> ret = new LinkedList<String>();
        if (k <= 0 || doc == null || doc.isEmpty()) {
            return ret;
        }
        HashMap<String, Integer> counts = countWords(doc);

        // Max-heap on frequency: the head is the most frequent entry.
        PriorityQueue<Map.Entry<String, Integer>> heap =
                new PriorityQueue<Map.Entry<String, Integer>>(
                        Math.max(1, counts.size()),
                        new Comparator<Map.Entry<String, Integer>>() {
                            @Override
                            public int compare(Map.Entry<String, Integer> e1,
                                               Map.Entry<String, Integer> e2) {
                                // Integer.compare avoids subtraction overflow.
                                return Integer.compare(e2.getValue(), e1.getValue());
                            }
                        });
        heap.addAll(counts.entrySet());

        int taken = 0;
        while (taken < k && !heap.isEmpty()) {
            ret.add(heap.poll().getKey());
            taken++;
        }
        return ret;
    }

    /**
     * Variant that keeps all words of the same frequency together: it returns
     * the words of the k highest distinct frequencies, so the result may hold
     * MORE than k words when several words share a frequency.
     *
     * Complexity: O(n) counting + O(m) grouping + O(f log f) heap work,
     * where f = number of distinct frequencies (f <= m <= n).
     *
     * @param doc the text document; {@code null} or empty yields an empty list
     * @param k   number of frequency groups to return; {@code k <= 0} yields an empty list
     * @return words of the k highest frequencies, highest-frequency group first
     */
    public static LinkedList<String> getTopKFrequentWords2(final String doc, final int k) {
        LinkedList<String> ret = new LinkedList<String>();
        if (k <= 0 || doc == null || doc.isEmpty()) {
            return ret;
        }
        HashMap<String, Integer> counts = countWords(doc);

        // Invert the count map: frequency -> all words having that frequency.
        HashMap<Integer, LinkedList<String>> byFreq = new HashMap<Integer, LinkedList<String>>();
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            LinkedList<String> bucket = byFreq.get(entry.getValue());
            if (bucket == null) {
                bucket = new LinkedList<String>();
                byFreq.put(entry.getValue(), bucket);
            }
            bucket.add(entry.getKey());
        }

        // Max-heap on the frequency key (highest frequency first).
        PriorityQueue<Map.Entry<Integer, LinkedList<String>>> heap =
                new PriorityQueue<Map.Entry<Integer, LinkedList<String>>>(
                        Math.max(1, byFreq.size()),
                        new Comparator<Map.Entry<Integer, LinkedList<String>>>() {
                            @Override
                            public int compare(Map.Entry<Integer, LinkedList<String>> e1,
                                               Map.Entry<Integer, LinkedList<String>> e2) {
                                return Integer.compare(e2.getKey(), e1.getKey());
                            }
                        });
        heap.addAll(byFreq.entrySet());

        int taken = 0;
        while (taken < k && !heap.isEmpty()) {
            ret.addAll(heap.poll().getValue());
            taken++;
        }
        return ret;
    }

    /**
     * Counts lower-cased letter runs in {@code doc}. O(n) in document length.
     * Shared by both public methods; the original duplicated this loop, and
     * its trailing-word branch probed the map with a StringBuilder key
     * (always a miss), resetting a repeated final word to count 1.
     */
    private static HashMap<String, Integer> countWords(final String doc) {
        HashMap<String, Integer> counts = new HashMap<String, Integer>();
        // NOTE(review): default-locale toLowerCase kept from the original.
        String lower = doc.toLowerCase();
        StringBuilder word = new StringBuilder();
        for (int i = 0; i < lower.length(); i++) {
            char ch = lower.charAt(i);
            if (isLetter(ch)) {
                word.append(ch);
            } else if (word.length() > 0) {
                // Non-letter ends the current word: record it and reset.
                bump(counts, word.toString());
                word.setLength(0); // reuse the builder instead of reallocating
            }
        }
        if (word.length() > 0) { // flush the final word of the document
            bump(counts, word.toString());
        }
        return counts;
    }

    /** Increments the count stored under {@code w} (1 when absent). */
    private static void bump(final HashMap<String, Integer> counts, final String w) {
        Integer c = counts.get(w);
        counts.put(w, c == null ? 1 : c + 1);
    }

    /** ASCII-only letter test; deliberately narrower than Character.isLetter. */
    public static boolean isLetter(final char ch) {
        return (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z');
    }
}
难道不是 O(n) 吗?